diff --git a/Cargo.toml b/Cargo.toml
index d77831d..898a3e7 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "datafusion-functions-json"
-version = "0.47.0"
+version = "0.48.0"
 edition = "2021"
 description = "JSON functions for DataFusion"
 readme = "README.md"
@@ -11,16 +11,16 @@ repository = "https://github.com/datafusion-contrib/datafusion-functions-json/"
 rust-version = "1.82.0"
 
 [dependencies]
-datafusion = { version = "47", default-features = false }
+datafusion = { version = "48", default-features = false }
 jiter = "0.9"
 paste = "1"
 log = "0.4"
 
 [dev-dependencies]
-datafusion = { version = "47", default-features = false, features = ["nested_expressions"] }
+datafusion = { version = "48", default-features = false, features = [
+    "nested_expressions",
+] }
 codspeed-criterion-compat = "2.6"
-criterion = "0.5.1"
-clap = "4"
 tokio = { version = "1.43", features = ["full"] }
 
 [lints.clippy]
diff --git a/src/rewrite.rs b/src/rewrite.rs
index 4161154..20aad95 100644
--- a/src/rewrite.rs
+++ b/src/rewrite.rs
@@ -80,7 +80,7 @@ fn unnest_json_calls(func: &ScalarFunction) -> Option<Transformed<Expr>> {
     let mut args = inner_func.args.clone();
     args.extend(outer_args_iter.cloned());
     // See #23, unnest only when all lookup arguments are literals
-    if args.iter().skip(1).all(|arg| matches!(arg, Expr::Literal(_))) {
+    if args.iter().skip(1).all(|arg| matches!(arg, Expr::Literal(_, _))) {
         Some(Transformed::yes(Expr::ScalarFunction(ScalarFunction {
             func: func.func.clone(),
             args,
@@ -149,7 +149,7 @@ fn expr_to_sql_repr(expr: &Expr) -> String {
             .as_ref()
             .map_or_else(|| name.clone(), |r| format!("{r}.{name}")),
         Expr::Alias(alias) => alias.name.clone(),
-        Expr::Literal(scalar) => match scalar {
+        Expr::Literal(scalar, _) => match scalar {
             ScalarValue::Utf8(Some(v)) | ScalarValue::Utf8View(Some(v)) | ScalarValue::LargeUtf8(Some(v)) => {
                 format!("'{v}'")
             }
diff --git a/tests/main.rs b/tests/main.rs
index f591385..5b9ebff 100644
--- a/tests/main.rs
+++ b/tests/main.rs
@@ -1,3 +1,4 @@
+use std::collections::HashMap;
 use std::sync::Arc;
 
 use datafusion::arrow::array::{Array, ArrayRef, DictionaryArray, RecordBatch};
@@ -503,8 +504,16 @@ fn test_json_get_utf8() {
     let ColumnarValue::Scalar(sv) = json_get_str
         .invoke_with_args(ScalarFunctionArgs {
             args: args.to_vec(),
+            arg_fields: vec![
+                Arc::new(Field::new("arg_1", DataType::LargeUtf8, false)),
+                Arc::new(Field::new("arg_2", DataType::LargeUtf8, false)),
+                Arc::new(Field::new("arg_3", DataType::LargeUtf8, false)),
+            ],
             number_rows: 1,
-            return_type: &DataType::Utf8,
+            return_field: Arc::new(
+                Field::new("ret_field", DataType::Utf8, false)
+                    .with_metadata(HashMap::from_iter(vec![("is_json".to_string(), "true".to_string())])),
+            ),
         })
         .unwrap()
     else {
@@ -528,8 +537,16 @@ fn test_json_get_large_utf8() {
     let ColumnarValue::Scalar(sv) = json_get_str
         .invoke_with_args(ScalarFunctionArgs {
             args: args.to_vec(),
+            arg_fields: vec![
+                Arc::new(Field::new("arg_1", DataType::LargeUtf8, false)),
+                Arc::new(Field::new("arg_2", DataType::LargeUtf8, false)),
+                Arc::new(Field::new("arg_3", DataType::LargeUtf8, false)),
+            ],
             number_rows: 1,
-            return_type: &DataType::LargeUtf8,
+            return_field: Arc::new(
+                Field::new("ret_field", DataType::Utf8, false)
+                    .with_metadata(HashMap::from_iter(vec![("is_json".to_string(), "true".to_string())])),
+            ),
         })
         .unwrap()
     else {