Skip to content

Commit fb233e2

Browse files
committed
[SPARK-52678] Update ArrowReader.swift with GH-54
1 parent ff417c4 commit fb233e2

File tree

1 file changed

+31
-34
lines changed

1 file changed

+31
-34
lines changed

Sources/SparkConnect/ArrowReader.swift

Lines changed: 31 additions & 34 deletions
Original file line number | Diff line number | Diff line change
@@ -274,23 +274,22 @@ public class ArrowReader { // swiftlint:disable:this type_body_length
274274
let message = org_apache_arrow_flatbuf_Message.getRootAsMessage(bb: dataBuffer)
275275
switch message.headerType {
276276
case .recordbatch:
277-
do {
278-
let rbMessage = message.header(type: org_apache_arrow_flatbuf_RecordBatch.self)!
279-
let recordBatch = try loadRecordBatch(
280-
rbMessage,
281-
schema: schemaMessage!,
282-
arrowSchema: result.schema!,
283-
data: input,
284-
messageEndOffset: (Int64(offset) + Int64(length))
285-
).get()
277+
let rbMessage = message.header(type: org_apache_arrow_flatbuf_RecordBatch.self)!
278+
let recordBatchResult = try loadRecordBatch(
279+
rbMessage,
280+
schema: schemaMessage!,
281+
arrowSchema: result.schema!,
282+
data: input,
283+
messageEndOffset: (Int64(offset) + Int64(length))
284+
)
285+
switch recordBatchResult {
286+
case .success(let recordBatch):
286287
result.batches.append(recordBatch)
287-
offset += Int(message.bodyLength + Int64(length))
288-
length = getUInt32(input, offset: offset)
289-
} catch let error as ArrowError {
288+
case .failure(let error):
290289
return .failure(error)
291-
} catch {
292-
return .failure(.unknownError("Unexpected error: \(error)"))
293290
}
291+
offset += Int(message.bodyLength + Int64(length))
292+
length = getUInt32(input, offset: offset)
294293
case .schema:
295294
schemaMessage = message.header(type: org_apache_arrow_flatbuf_Schema.self)!
296295
let schemaResult = loadSchema(schemaMessage!)
@@ -363,20 +362,19 @@ public class ArrowReader { // swiftlint:disable:this type_body_length
363362
let message = org_apache_arrow_flatbuf_Message.getRootAsMessage(bb: mbb)
364363
switch message.headerType {
365364
case .recordbatch:
366-
do {
367-
let rbMessage = message.header(type: org_apache_arrow_flatbuf_RecordBatch.self)!
368-
let recordBatch = try loadRecordBatch(
369-
rbMessage,
370-
schema: footer.schema!,
371-
arrowSchema: result.schema!,
372-
data: fileData,
373-
messageEndOffset: messageEndOffset
374-
).get()
365+
let rbMessage = message.header(type: org_apache_arrow_flatbuf_RecordBatch.self)!
366+
let recordBatchResult = try loadRecordBatch(
367+
rbMessage,
368+
schema: footer.schema!,
369+
arrowSchema: result.schema!,
370+
data: fileData,
371+
messageEndOffset: messageEndOffset
372+
)
373+
switch recordBatchResult {
374+
case .success(let recordBatch):
375375
result.batches.append(recordBatch)
376-
} catch let error as ArrowError {
376+
case .failure(let error):
377377
return .failure(error)
378-
} catch {
379-
return .failure(.unknownError("Unexpected error: \(error)"))
380378
}
381379
default:
382380
return .failure(.unknownError("Unhandled header type: \(message.headerType)"))
@@ -429,17 +427,16 @@ public class ArrowReader { // swiftlint:disable:this type_body_length
429427
}
430428
case .recordbatch:
431429
let rbMessage = message.header(type: org_apache_arrow_flatbuf_RecordBatch.self)!
432-
do {
433-
let recordBatch = try loadRecordBatch(
434-
rbMessage, schema: result.messageSchema!, arrowSchema: result.schema!,
435-
data: dataBody, messageEndOffset: 0
436-
).get()
430+
let recordBatchResult = try loadRecordBatch(
431+
rbMessage, schema: result.messageSchema!, arrowSchema: result.schema!,
432+
data: dataBody, messageEndOffset: 0
433+
)
434+
switch recordBatchResult {
435+
case .success(let recordBatch):
437436
result.batches.append(recordBatch)
438437
return .success(())
439-
} catch let error as ArrowError {
438+
case .failure(let error):
440439
return .failure(error)
441-
} catch {
442-
return .failure(.unknownError("Unexpected error: \(error)"))
443440
}
444441
default:
445442
return .failure(.unknownError("Unhandled header type: \(message.headerType)"))

0 commit comments

Comments (0)