
Commit d26e32b
[SPARK-51967] Use discardableResult to prevent unnecessary warnings

### What changes were proposed in this pull request?

This PR aims to use `@discardableResult` to prevent unnecessary warnings.

### Why are the changes needed?

To simplify `Spark Connect Swift` API usage. For example, the `count()` API is used to execute an INSERT query. By adding `@discardableResult`, we make it clear that the result is discardable.

```swift
@discardableResult
public func count() async throws -> Int64 {
```

### Does this PR introduce _any_ user-facing change?

This is a change to the unreleased version.

### How was this patch tested?

Pass the CIs.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #95 from dongjoon-hyun/SPARK-51967.

Authored-by: Dongjoon Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
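As a hedged illustration of the caller-side effect (the `spark` session and table `t` below are hypothetical, not part of this commit):

```swift
// Before: count() returns Int64, so a caller running INSERT for its
// side effect had to write `_ =` to silence the unused-result warning.
_ = try await spark.sql("INSERT INTO t VALUES (1)").count()

// After: with @discardableResult on count(), the value may be
// silently dropped while still being returned when it is wanted.
try await spark.sql("INSERT INTO t VALUES (1)").count()
```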
1 parent ca9dec5 · commit d26e32b

17 files changed: +47 −34 lines

Sources/SparkConnect/Catalog.swift

Lines changed: 2 additions & 2 deletions

```diff
@@ -114,7 +114,7 @@ public actor Catalog: Sendable {
       catalog.catType = .setCurrentCatalog(setCurrentCatalog)
       return catalog
     })
-    _ = try await df.count()
+    try await df.count()
   }
 
   /// Returns a list of catalogs in this session.
@@ -156,7 +156,7 @@ public actor Catalog: Sendable {
       catalog.catType = .setCurrentDatabase(setCurrentDatabase)
       return catalog
     })
-    _ = try await df.count()
+    try await df.count()
   }
 
   /// Returns a list of databases available across all sessions.
```

Sources/SparkConnect/DataFrame.swift

Lines changed: 3 additions & 0 deletions

```diff
@@ -136,6 +136,7 @@ public actor DataFrame: Sendable {
 
   /// Return the total number of rows.
   /// - Returns: a `Int64` value.
+  @discardableResult
   public func count() async throws -> Int64 {
     let counter = Atomic(Int64(0))
 
@@ -440,6 +441,7 @@ public actor DataFrame: Sendable {
 
   /// Persist this `DataFrame` with the given storage level.
   /// - Parameter storageLevel: A storage level to apply.
+  @discardableResult
   public func persist(storageLevel: StorageLevel = StorageLevel.MEMORY_AND_DISK) async throws
     -> DataFrame
   {
@@ -456,6 +458,7 @@ public actor DataFrame: Sendable {
   /// This will not un-persist any cached data that is built upon this `DataFrame`.
   /// - Parameter blocking: Whether to block until all blocks are deleted.
   /// - Returns: A `DataFrame`
+  @discardableResult
   public func unpersist(blocking: Bool = false) async throws -> DataFrame {
     try await withGPRC { client in
       let service = Spark_Connect_SparkConnectService.Client(wrapping: client)
```
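The attribute only suppresses the warning when the result is ignored; binding the value still works as before. A minimal sketch, assuming an existing `df: DataFrame`:

```swift
// The return value is still usable when needed.
let rows: Int64 = try await df.count()
print("row count: \(rows)")

// Fire-and-forget calls no longer require `_ =`.
try await df.persist()
try await df.unpersist()
```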

Sources/SparkConnect/DataFrameReader.swift

Lines changed: 2 additions & 1 deletion

```diff
@@ -85,10 +85,11 @@ public actor DataFrameReader: Sendable {
   /// the schema inference step, and thus speed up data loading.
   /// - Parameter schema: A DDL schema string.
   /// - Returns: A `DataFrameReader`.
+  @discardableResult
   public func schema(_ schema: String) async throws -> DataFrameReader {
     // Validate by parsing.
     do {
-      _ = try await sparkSession.client.ddlParse(schema)
+      try await sparkSession.client.ddlParse(schema)
     } catch {
       throw SparkConnectError.InvalidTypeException
     }
```

Sources/SparkConnect/DataFrameWriter.swift

Lines changed: 1 addition & 1 deletion

```diff
@@ -177,7 +177,7 @@ public actor DataFrameWriter: Sendable {
     var command = Spark_Connect_Command()
     command.writeOperation = write
 
-    _ = try await df.spark.client.execute(df.spark.sessionID, command)
+    try await df.spark.client.execute(df.spark.sessionID, command)
   }
 
   /// Saves the content of the `DataFrame` in CSV format at the specified path.
```

Sources/SparkConnect/DataFrameWriterV2.swift

Lines changed: 1 addition & 1 deletion

```diff
@@ -147,6 +147,6 @@ public actor DataFrameWriterV2: Sendable {
 
     var command = Spark_Connect_Command()
     command.writeOperationV2 = write
-    _ = try await df.spark.client.execute(df.spark.sessionID, command)
+    try await df.spark.client.execute(df.spark.sessionID, command)
   }
 }
```

Sources/SparkConnect/RuntimeConf.swift

Lines changed: 2 additions & 2 deletions

```diff
@@ -32,14 +32,14 @@ public actor RuntimeConf {
   ///   - key: A string for the configuration key.
   ///   - value: A string for the configuration value.
   public func set(_ key: String, _ value: String) async throws {
-    _ = try await client.setConf(map: [key: value])
+    try await client.setConf(map: [key: value])
   }
 
   /// Reset a configuration.
   /// - Parameters:
   ///   - key: A string for the configuration key.
   public func unset(_ key: String) async throws {
-    _ = try await client.unsetConf(keys: [key])
+    try await client.unsetConf(keys: [key])
   }
 
   /// Get a configuration.
```
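Since `setConf`/`unsetConf` always return `true`, `RuntimeConf` can now forward to them without `_ =`. A usage sketch against the `set`/`unset` signatures shown above, assuming `conf` is a `RuntimeConf` instance (hypothetical accessor):

```swift
// Set and reset a session configuration; the Bool results from the
// underlying client calls are intentionally discarded.
try await conf.set("spark.sql.shuffle.partitions", "10")
try await conf.unset("spark.sql.shuffle.partitions")
```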

Sources/SparkConnect/SparkConnectClient.swift

Lines changed: 10 additions & 2 deletions

```diff
@@ -87,6 +87,7 @@ public actor SparkConnectClient {
   /// As a test connection, this sends the server `SparkVersion` request.
   /// - Parameter sessionID: A string for the session ID.
   /// - Returns: An `AnalyzePlanResponse` instance for `SparkVersion`
+  @discardableResult
   func connect(_ sessionID: String) async throws -> AnalyzePlanResponse {
     try await withGPRC { client in
       // To prevent server-side `INVALID_HANDLE.FORMAT (SQLSTATE: HY000)` exception.
@@ -137,14 +138,15 @@ public actor SparkConnectClient {
   /// Request the server to set a map of configurations for this session.
   /// - Parameter map: A map of key-value pairs to set.
   /// - Returns: Always return true.
+  @discardableResult
   func setConf(map: [String: String]) async throws -> Bool {
     try await withGPRC { client in
       let service = SparkConnectService.Client(wrapping: client)
       var request = getConfigRequestSet(map: map)
       request.clientType = clientType
       request.userContext = userContext
       request.sessionID = self.sessionID!
-      let _ = try await service.config(request)
+      _ = try await service.config(request)
       return true
     }
   }
@@ -160,7 +162,11 @@ public actor SparkConnectClient {
       request.operation.opType = .unset(unset)
       return request
     }
-
+
+  /// Request the server to unset keys
+  /// - Parameter keys: An array of keys
+  /// - Returns: Always return true
+  @discardableResult
   func unsetConf(keys: [String]) async throws -> Bool {
     try await withGPRC { client in
       let service = SparkConnectService.Client(wrapping: client)
@@ -509,6 +515,7 @@ public actor SparkConnectClient {
       self.result.append(response)
     }
 
+  @discardableResult
   func execute(_ sessionID: String, _ command: Command) async throws -> [ExecutePlanResponse] {
     self.result.removeAll()
     try await withGPRC { client in
@@ -555,6 +562,7 @@ public actor SparkConnectClient {
   /// Parse a DDL string to ``Spark_Connect_DataType`` instance.
   /// - Parameter ddlString: A string to parse.
   /// - Returns: A ``Spark_Connect_DataType`` instance.
+  @discardableResult
   func ddlParse(_ ddlString: String) async throws -> Spark_Connect_DataType {
     try await withGPRC { client in
       let service = SparkConnectService.Client(wrapping: client)
```

Sources/SparkConnect/SparkFileUtils.swift

Lines changed: 1 addition & 1 deletion

```diff
@@ -85,7 +85,7 @@ public enum SparkFileUtils {
   static func createDirectory(root: String, namePrefix: String = "spark") -> URL {
     let tempDir = URL(fileURLWithPath: root).appendingPathComponent(
       "\(namePrefix)-\(UUID().uuidString)")
-    _ = createDirectory(at: tempDir)
+    createDirectory(at: tempDir)
     return tempDir
   }
 
```

Sources/SparkConnect/SparkSession.swift

Lines changed: 1 addition & 0 deletions

```diff
@@ -201,6 +201,7 @@ public actor SparkSession {
 
     /// Remove all stored configurations.
     /// - Returns: self
+    @discardableResult
     func clear() -> Builder {
       sparkConf.removeAll()
       return self
```
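Because `clear()` returns `self` in the fluent-builder style, `@discardableResult` lets callers reset the builder without binding the returned `Builder`. A sketch mirroring the test below (the `sc://localhost:15002` endpoint is a placeholder):

```swift
// Configure a builder without connecting.
let builder = await SparkSession.builder.remote("sc://localhost:15002")

// Reset stored configurations; the returned Builder can be ignored.
await builder.clear()
```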

Tests/SparkConnectTests/BuilderTests.swift

Lines changed: 1 addition & 1 deletion

```diff
@@ -42,7 +42,7 @@ struct BuilderTests {
     // Don't try to connect
     let builder = await SparkSession.builder.remote("sc://spark:1234")
     #expect(await builder.sparkConf["spark.remote"] == "sc://spark:1234")
-    _ = await builder.clear()
+    await builder.clear()
   }
 
   @Test
```
