Skip to content

Commit 9e2a681

Browse files
committed
Resolve conflicts
1 parent b0a3541 commit 9e2a681

File tree

4 files changed

+15
-7
lines changed

4 files changed

+15
-7
lines changed

crates/iceberg/src/arrow/reader.rs

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,9 @@ use parquet::arrow::arrow_reader::{
3939
};
4040
use parquet::arrow::async_reader::AsyncFileReader;
4141
use parquet::arrow::{PARQUET_FIELD_ID_META_KEY, ParquetRecordBatchStreamBuilder, ProjectionMask};
42-
use parquet::file::metadata::{ParquetMetaData, ParquetMetaDataReader, RowGroupMetaData};
42+
use parquet::file::metadata::{
43+
PageIndexPolicy, ParquetMetaData, ParquetMetaDataReader, RowGroupMetaData,
44+
};
4345
use parquet::schema::types::{SchemaDescriptor, Type as ParquetType};
4446

4547
use crate::arrow::caching_delete_file_loader::CachingDeleteFileLoader;
@@ -1390,9 +1392,9 @@ impl<R: FileRead> AsyncFileReader for ArrowFileReader<R> {
13901392
async move {
13911393
let reader = ParquetMetaDataReader::new()
13921394
.with_prefetch_hint(self.metadata_size_hint)
1393-
.with_column_indexes(self.preload_column_index)
1394-
.with_page_indexes(self.preload_page_index)
1395-
.with_offset_indexes(self.preload_offset_index);
1395+
.with_column_index_policy(PageIndexPolicy::from(self.preload_column_index))
1396+
.with_page_index_policy(PageIndexPolicy::from(self.preload_page_index))
1397+
.with_offset_index_policy(PageIndexPolicy::from(self.preload_offset_index));
13961398
let size = self.meta.size;
13971399
let meta = reader.load_and_finish(self, size).await?;
13981400

crates/integrations/datafusion/src/physical_plan/commit.rs

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,10 @@ impl IcebergCommitExec {
8484
let count_array = Arc::new(UInt64Array::from(vec![count])) as ArrayRef;
8585

8686
RecordBatch::try_from_iter_with_nullable(vec![("count", count_array, false)]).map_err(|e| {
87-
DataFusionError::ArrowError(e, Some("Failed to make count batch!".to_string()))
87+
DataFusionError::ArrowError(
88+
Box::new(e),
89+
Some("Failed to make count batch!".to_string()),
90+
)
8891
})
8992
}
9093

crates/integrations/datafusion/src/physical_plan/write.rs

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -100,7 +100,10 @@ impl IcebergWriteExec {
100100
let files_array = Arc::new(StringArray::from(data_files)) as ArrayRef;
101101

102102
RecordBatch::try_new(Self::make_result_schema(), vec![files_array]).map_err(|e| {
103-
DataFusionError::ArrowError(e, Some("Failed to make result batch".to_string()))
103+
DataFusionError::ArrowError(
104+
Box::new(e),
105+
Some("Failed to make result batch".to_string()),
106+
)
104107
})
105108
}
106109

crates/integrations/datafusion/src/table/table_provider_factory.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -241,7 +241,7 @@ mod tests {
241241
options: Default::default(),
242242
table_partition_cols: Default::default(),
243243
order_exprs: Default::default(),
244-
constraints: Constraints::empty(),
244+
constraints: Constraints::default(),
245245
column_defaults: Default::default(),
246246
if_not_exists: Default::default(),
247247
temporary: false,

0 commit comments

Comments (0)