Commit f715923
[SPARK-52318] Refactor SparkConnectError to simplify case names
### What changes were proposed in this pull request?

This PR aims to refactor `SparkConnectError` to simplify case names.

### Why are the changes needed?

We should simplify the case names by removing the `Exception` postfix before adding more errors like `CatalogNotFound` and `SchemaNotFound`. In Swift, these errors are used with the `SparkConnectError.` prefix, like the following.

```swift
- throw SparkConnectError.UnsupportedOperationException
+ throw SparkConnectError.UnsupportedOperation
```

### Does this PR introduce _any_ user-facing change?

Yes, but this only affects the exception names of the 0.x releases.

### How was this patch tested?

Pass the CIs.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #177 from dongjoon-hyun/SPARK-52318.

Authored-by: Dongjoon Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
1 parent af7b0c9 commit f715923

16 files changed: +32 -32 lines changed
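Because the rename is source-incompatible for 0.x users, any `catch` that matched the old case names needs a one-line update. The sketch below is illustrative only: the builder, `range`, and `count` calls are assumed from the client's usual API surface, and the updated `catch` clauses are the point.

```swift
import SparkConnect

// Illustrative migration sketch; the session setup is assumed from the
// client's usual builder API and is not part of this commit.
func checkMigration() async throws {
  let spark = try await SparkSession.builder.getOrCreate()
  do {
    let df = try await spark.range(1)
    print(try await df.count())
  } catch SparkConnectError.UnsupportedOperation {
    // 0.x migration: was `catch SparkConnectError.UnsupportedOperationException`
    print("Operation is not supported over Spark Connect.")
  } catch SparkConnectError.InvalidArgument {
    // 0.x migration: was `catch SparkConnectError.InvalidArgumentException`
    print("Invalid argument.")
  }
  await spark.stop()
}
```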

Sources/SparkConnect/DataFrame.swift

Lines changed: 1 addition & 1 deletion
```diff
@@ -258,7 +258,7 @@ public actor DataFrame: Sendable {
     // SQLSTATE: 0A000
     // [UNSUPPORTED_CONNECT_FEATURE.RDD]
     // Feature is not supported in Spark Connect: Resilient Distributed Datasets (RDDs).
-    throw SparkConnectError.UnsupportedOperationException
+    throw SparkConnectError.UnsupportedOperation
   }

   /// Return an array of column name strings
```

Sources/SparkConnect/Extension.swift

Lines changed: 5 additions & 5 deletions
```diff
@@ -213,7 +213,7 @@ extension YearMonthInterval {
     case 0: "year"
     case 1: "month"
     default:
-      throw SparkConnectError.InvalidTypeException
+      throw SparkConnectError.InvalidType
     }
   }

@@ -225,7 +225,7 @@ extension YearMonthInterval {
     } else if startFieldName < endFieldName {
       "interval \(startFieldName) to \(endFieldName)"
     } else {
-      throw SparkConnectError.InvalidTypeException
+      throw SparkConnectError.InvalidType
     }
     return interval
   }
@@ -239,7 +239,7 @@ extension DayTimeInterval {
     case 2: "minute"
     case 3: "second"
     default:
-      throw SparkConnectError.InvalidTypeException
+      throw SparkConnectError.InvalidType
     }
   }

@@ -251,7 +251,7 @@ extension DayTimeInterval {
     } else if startFieldName < endFieldName {
       "interval \(startFieldName) to \(endFieldName)"
     } else {
-      throw SparkConnectError.InvalidTypeException
+      throw SparkConnectError.InvalidType
     }
     return interval
   }
@@ -325,7 +325,7 @@ extension DataType {
     case .unparsed:
       self.unparsed.dataTypeString
     default:
-      throw SparkConnectError.InvalidTypeException
+      throw SparkConnectError.InvalidType
     }
   }
 }
```

Sources/SparkConnect/MergeIntoWriter.swift

Lines changed: 1 addition & 1 deletion
```diff
@@ -187,7 +187,7 @@ public actor MergeIntoWriter {
       && self.mergeIntoTableCommand.notMatchedActions.count == 0
       && self.mergeIntoTableCommand.notMatchedBySourceActions.count == 0
     {
-      throw SparkConnectError.InvalidArgumentException
+      throw SparkConnectError.InvalidArgument
     }
     self.mergeIntoTableCommand.sourceTablePlan = await (self.df.getPlan() as! Plan).root
     self.mergeIntoTableCommand.withSchemaEvolution = self.schemaEvolution
```

Sources/SparkConnect/ProtoUtils.swift

Lines changed: 2 additions & 2 deletions
```diff
@@ -30,10 +30,10 @@ public enum ProtoUtils {
     // because the Spark Connect job tag is also used as part of SparkContext job tag.
     // See SparkContext.throwIfInvalidTag and ExecuteHolderSessionTag
     if tag.isEmpty {
-      throw SparkConnectError.InvalidArgumentException
+      throw SparkConnectError.InvalidArgument
     }
     if tag.contains(SPARK_JOB_TAGS_SEP) {
-      throw SparkConnectError.InvalidArgumentException
+      throw SparkConnectError.InvalidArgument
     }
   }
 }
```

Sources/SparkConnect/Row.swift

Lines changed: 1 addition & 1 deletion
```diff
@@ -45,7 +45,7 @@ public struct Row: Sendable, Equatable {

   public func get(_ i: Int) throws -> Sendable {
     if i < 0 || i >= self.length {
-      throw SparkConnectError.InvalidArgumentException
+      throw SparkConnectError.InvalidArgument
     }
     return values[i]
   }
```
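After this change, an out-of-range index in `Row.get(_:)` surfaces as `.InvalidArgument`. A minimal sketch, assuming a variadic `Row` initializer for construction:

```swift
// Minimal sketch; the variadic Row(...) construction is assumed for
// illustration. Valid indices are 0..<row.length.
let row = Row(1, "a", true)
do {
  _ = try row.get(3)
} catch SparkConnectError.InvalidArgument {
  // was SparkConnectError.InvalidArgumentException before this commit
  print("index out of range")
}
```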

Sources/SparkConnect/SparkConnectClient.swift

Lines changed: 2 additions & 2 deletions
```diff
@@ -104,7 +104,7 @@ public actor SparkConnectClient {
     try await withGPRC { client in
       // To prevent server-side `INVALID_HANDLE.FORMAT (SQLSTATE: HY000)` exception.
       if UUID(uuidString: sessionID) == nil {
-        throw SparkConnectError.InvalidSessionIDException
+        throw SparkConnectError.InvalidSessionID
       }

       self.sessionID = sessionID
@@ -787,7 +787,7 @@ public actor SparkConnectClient {
     } catch let error as RPCError where error.code == .internalError {
       switch error.message {
       case let m where m.contains("UNSUPPORTED_DATATYPE") || m.contains("INVALID_IDENTIFIER"):
-        throw SparkConnectError.InvalidTypeException
+        throw SparkConnectError.InvalidType
       default:
         throw error
       }
```

Sources/SparkConnect/SparkConnectError.swift

Lines changed: 4 additions & 4 deletions
```diff
@@ -19,8 +19,8 @@

 /// A enum for ``SparkConnect`` package errors
 public enum SparkConnectError: Error {
-  case UnsupportedOperationException
-  case InvalidArgumentException
-  case InvalidSessionIDException
-  case InvalidTypeException
+  case InvalidArgument
+  case InvalidSessionID
+  case InvalidType
+  case UnsupportedOperation
 }
```
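With the `Exception` suffix dropped, exhaustive matching over the enum reads cleanly. A small sketch using only the four cases defined above; the `describe` helper itself is hypothetical:

```swift
// Hypothetical helper: maps each renamed case to a short message.
func describe(_ error: SparkConnectError) -> String {
  switch error {
  case .InvalidArgument: return "invalid argument"
  case .InvalidSessionID: return "session ID must be a valid UUID string"
  case .InvalidType: return "unsupported or invalid data type"
  case .UnsupportedOperation: return "operation not supported in Spark Connect"
  }
}
```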

Sources/SparkConnect/SparkFileUtils.swift

Lines changed: 1 addition & 1 deletion
```diff
@@ -106,7 +106,7 @@ public enum SparkFileUtils {
     if fileManager.fileExists(atPath: url.path) {
       try fileManager.removeItem(at: url)
     } else {
-      throw SparkConnectError.InvalidArgumentException
+      throw SparkConnectError.InvalidArgument
     }
   }
 }
```

Sources/SparkConnect/SparkSession.swift

Lines changed: 1 addition & 1 deletion
```diff
@@ -83,7 +83,7 @@ public actor SparkSession {
       // SQLSTATE: 0A000
       // [UNSUPPORTED_CONNECT_FEATURE.SESSION_SPARK_CONTEXT]
       // Feature is not supported in Spark Connect: Access to the SparkContext.
-      throw SparkConnectError.UnsupportedOperationException
+      throw SparkConnectError.UnsupportedOperation
     }
   }

```

Sources/SparkConnect/StreamingQueryManager.swift

Lines changed: 2 additions & 2 deletions
```diff
@@ -135,7 +135,7 @@ public actor StreamingQueryManager {
     let response = try await self.sparkSession.client.executeStreamingQueryManagerCommand(command)
     let query = response.first!.streamingQueryManagerCommandResult.query
     guard query.hasID else {
-      throw SparkConnectError.InvalidArgumentException
+      throw SparkConnectError.InvalidArgument
     }
     return StreamingQuery(
       UUID(uuidString: query.id.id)!,
@@ -154,7 +154,7 @@ public actor StreamingQueryManager {
     var awaitAnyTerminationCommand = StreamingQueryManagerCommand.AwaitAnyTerminationCommand()
     if let timeoutMs {
       guard timeoutMs > 0 else {
-        throw SparkConnectError.InvalidArgumentException
+        throw SparkConnectError.InvalidArgument
       }
       awaitAnyTerminationCommand.timeoutMs = timeoutMs
     }
```
