Commit d367a7c

minor
1 parent f52a698 commit d367a7c

File tree

1 file changed: +3 -4 lines changed

  • crates/integrations/datafusion/src/physical_plan

crates/integrations/datafusion/src/physical_plan/write.rs

Lines changed: 3 additions & 4 deletions
@@ -19,7 +19,7 @@ use std::any::Any;
 use std::fmt::{Debug, Formatter};
 use std::sync::Arc;
 
-use datafusion::arrow::array::{ArrayRef, RecordBatch, StringArray, UInt64Array};
+use datafusion::arrow::array::{ArrayRef, RecordBatch, StringArray, StructArray, UInt64Array};
 use datafusion::arrow::datatypes::{
     DataType, Field, Schema as ArrowSchema, SchemaRef as ArrowSchemaRef,
 };
@@ -74,15 +74,14 @@ impl IcebergWriteExec {
     }
 
     // Create a record batch with count and serialized data files
-    fn make_result_batch(count: u64, data_files: Vec<String>) -> RecordBatch {
+    fn make_result_batch(count: u64, data_files: Vec<String>) -> DFResult<RecordBatch> {
         let count_array = Arc::new(UInt64Array::from(vec![count])) as ArrayRef;
         let files_array = Arc::new(StringArray::from(data_files)) as ArrayRef;
 
         RecordBatch::try_from_iter_with_nullable(vec![
             ("count", count_array, false),
             ("data_files", files_array, false),
         ])
-        .unwrap()
     }
 
     fn make_result_schema() -> ArrowSchemaRef {
@@ -199,7 +198,7 @@ impl ExecutionPlan for IcebergWriteExec {
                 })
                 .collect::<Vec<String>>();
 
-            Ok(Self::make_result_batch(count, data_files))
+            Ok(Self::make_result_batch(count, data_files)?)
         })
         .boxed();

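For reference, a minimal sketch (not the verbatim file) of what the revised helper looks like after this change. It assumes DFResult is the crate's alias for datafusion::error::Result and relies on DataFusionError's From<ArrowError> impl so that `?` converts the Arrow error; any error mapping done elsewhere in write.rs is not shown in these hunks.

use std::sync::Arc;

use datafusion::arrow::array::{ArrayRef, RecordBatch, StringArray, UInt64Array};
use datafusion::error::Result as DFResult;

// Sketch: instead of panicking with .unwrap(), propagate the batch-construction
// error as a DataFusionError so the caller can surface it through the stream.
fn make_result_batch(count: u64, data_files: Vec<String>) -> DFResult<RecordBatch> {
    let count_array = Arc::new(UInt64Array::from(vec![count])) as ArrayRef;
    let files_array = Arc::new(StringArray::from(data_files)) as ArrayRef;

    // `?` converts arrow's ArrowError into a DataFusionError via its From impl.
    Ok(RecordBatch::try_from_iter_with_nullable(vec![
        ("count", count_array, false),
        ("data_files", files_array, false),
    ])?)
}

At the call site in the last hunk, Ok(Self::make_result_batch(count, data_files)?) keeps the stream closure's item type as DFResult<RecordBatch> while bubbling any construction error instead of aborting the query with a panic.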