Commit 169eb66

upgrade to DF50 (#87)
1 parent 540f29e

File tree: 3 files changed, +18 -17 lines

Cargo.toml

Lines changed: 12 additions & 12 deletions
@@ -28,19 +28,19 @@ keywords = ["arrow", "arrow-rs", "datafusion"]
 rust-version = "1.85.1"

 [dependencies]
-arrow = "55.2.0"
-arrow-schema = "55.2.0"
-async-trait = "0.1"
+arrow = "56.0.0"
+arrow-schema = "56.0.0"
+async-trait = "0.1.89"
 dashmap = "6"
-datafusion = "49.0.2"
-datafusion-common = "49.0.2"
-datafusion-expr = "49.0.2"
-datafusion-functions = "49.0.2"
-datafusion-functions-aggregate = "49.0.2"
-datafusion-optimizer = "49.0.2"
-datafusion-physical-expr = "49.0.2"
-datafusion-physical-plan = "49.0.2"
-datafusion-sql = "49.0.2"
+datafusion = "50"
+datafusion-common = "50"
+datafusion-expr = "50"
+datafusion-functions = "50"
+datafusion-functions-aggregate = "50"
+datafusion-optimizer = "50"
+datafusion-physical-expr = "50"
+datafusion-physical-plan = "50"
+datafusion-sql = "50"
 futures = "0.3"
 itertools = "0.14"
 log = "0.4"
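
DataFusion 50 is built against arrow 56, so the standalone arrow and arrow-schema crates are bumped in lockstep; keeping arrow at 55 alongside DataFusion 50 would pull two distinct arrow versions into the build and produce type mismatches. Below is a minimal compile-time check of that pairing; it is a sketch, not part of this commit, and assumes only the arrow and datafusion dependencies listed above.

// Type-checks only when the standalone `arrow` crate and the `arrow`
// re-exported by `datafusion` resolve to the same crate version
// (arrow 56 for DataFusion 50). With mismatched versions the two
// `DataType` paths are different types and this fails to compile.
fn same_arrow(dt: arrow::datatypes::DataType) -> datafusion::arrow::datatypes::DataType {
    dt
}

fn main() {
    let dt = same_arrow(arrow::datatypes::DataType::Utf8);
    println!("{dt:?}");
}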

src/materialized/file_metadata.rs

Lines changed: 4 additions & 3 deletions
@@ -17,8 +17,9 @@

 use arrow::array::{StringBuilder, TimestampNanosecondBuilder, UInt64Builder};
 use arrow::record_batch::RecordBatch;
-use arrow_schema::{DataType, Field, Schema, SchemaRef, TimeUnit};
+use arrow_schema::{DataType, Field, TimeUnit};
 use async_trait::async_trait;
+use datafusion::arrow::datatypes::{Schema, SchemaRef};
 use datafusion::catalog::SchemaProvider;
 use datafusion::catalog::{CatalogProvider, Session};
 use datafusion::datasource::listing::ListingTableUrl;
@@ -35,7 +36,7 @@ use datafusion::physical_plan::{
 use datafusion::{
     catalog::CatalogProviderList, execution::TaskContext, physical_plan::SendableRecordBatchStream,
 };
-use datafusion_common::{DataFusionError, Result, ScalarValue, ToDFSchema};
+use datafusion_common::{DFSchema, DataFusionError, Result, ScalarValue};
 use datafusion_expr::{Expr, Operator, TableProviderFilterPushDown, TableType};
 use datafusion_physical_plan::execution_plan::{Boundedness, EmissionType};
 use futures::stream::{self, BoxStream};
@@ -103,7 +104,7 @@ impl TableProvider for FileMetadata {
         filters: &[Expr],
         limit: Option<usize>,
     ) -> Result<Arc<dyn ExecutionPlan>> {
-        let dfschema = self.table_schema.clone().to_dfschema()?;
+        let dfschema = DFSchema::try_from(self.table_schema.as_ref().clone())?;

         let filters = filters
             .iter()
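
The ToDFSchema extension trait is dropped from the datafusion-common imports, and the scan path now builds its DFSchema through the TryFrom conversion on an owned arrow Schema instead. A minimal sketch of that conversion pattern follows; it assumes only the datafusion and datafusion-common dependencies above, and the "id" column is a hypothetical example.

use std::sync::Arc;

use datafusion::arrow::datatypes::{DataType, Field, Schema, SchemaRef};
use datafusion_common::{DFSchema, Result};

// Convert an Arc'd arrow Schema into a DFSchema the way scan() does above:
// clone the underlying Schema and go through DFSchema's TryFrom impl.
fn to_dfschema(table_schema: &SchemaRef) -> Result<DFSchema> {
    DFSchema::try_from(table_schema.as_ref().clone())
}

fn main() -> Result<()> {
    let schema: SchemaRef = Arc::new(Schema::new(vec![Field::new(
        "id",
        DataType::UInt64,
        false,
    )]));
    let dfschema = to_dfschema(&schema)?;
    assert_eq!(dfschema.fields().len(), 1);
    Ok(())
}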

src/materialized/hive_partition.rs

Lines changed: 2 additions & 2 deletions
@@ -18,7 +18,7 @@
 use std::sync::Arc;

 use arrow::array::{Array, StringArray, StringBuilder};
-use arrow_schema::DataType;
+use datafusion::arrow::datatypes::DataType;

 use datafusion_common::{DataFusionError, Result, ScalarValue};
 use datafusion_expr::{
@@ -79,7 +79,7 @@ pub fn hive_partition_udf() -> ScalarUDF {
     ScalarUDF::new_from_impl(udf_impl)
 }

-#[derive(Debug)]
+#[derive(Debug, Hash, PartialEq, Eq)]
 struct HivePartitionUdf {
     pub signature: Signature,
 }
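
The new derives give HivePartitionUdf structural equality and hashing through its Signature field (Signature itself derives those traits, which is what lets the derive compile). Newer DataFusion releases compare and hash UDF implementations during planning, which is presumably what the derives are for. A small sketch of the pattern under that assumption, using a hypothetical ExampleUdf:

use datafusion_expr::{Signature, Volatility};

// Hypothetical UDF struct mirroring HivePartitionUdf's shape: its only state
// is the Signature, so deriving Hash/PartialEq/Eq yields structural equality.
#[derive(Debug, Hash, PartialEq, Eq)]
struct ExampleUdf {
    signature: Signature,
}

fn main() {
    let a = ExampleUdf {
        signature: Signature::any(2, Volatility::Immutable),
    };
    let b = ExampleUdf {
        signature: Signature::any(2, Volatility::Immutable),
    };
    // Two instances built the same way compare equal thanks to the derives.
    assert_eq!(a, b);
}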
