Skip to content

Commit 49cfee0

Browse files
authored
fix: derive custom nullable for spark date_sub (#19225)
## Which issue does this PR close? <!-- We generally require a GitHub issue to be filed for all bug fixes and enhancements and this helps us generate change logs for our releases. You can link an issue to this PR using the GitHub syntax. For example `Closes #123` indicates that this PR will close issue #123. --> - Closes #19152. - Part of #19144 ## Rationale for this change - As stated in the original issue, the UDF relied on the default `is_nullable`, which always reports `true` — even when none of the arguments can actually be null. <!-- Why are you proposing this change? If this is already explained clearly in the issue then this section is not needed. Explaining clearly why changes are proposed helps reviewers understand your changes and offer better suggestions for fixes. --> ## What changes are included in this PR? - Spark `date_sub` now reports schema using `return_field_from_args` - Added unit tests <!-- There is no need to duplicate the description in the issue here but it is sometimes worth providing a summary of the individual changes in this PR. --> ## Are these changes tested? - All original tests pass - Added new unit tests for the changes <!-- We typically require tests for all PRs in order to: 1. Prevent the code from being accidentally broken by subsequent changes 2. Serve as another way to document the expected behavior of the code If tests are not included in your PR, please explain why (for example, are they covered by existing tests)? --> ## Are there any user-facing changes? <!-- If there are user-facing changes then we may require documentation to be updated before approving the PR. --> <!-- If there are any breaking changes to public APIs, please add the `api change` label. -->
1 parent c4ca946 commit 49cfee0

File tree

1 file changed

+76
-4
lines changed

1 file changed

+76
-4
lines changed

datafusion/spark/src/function/datetime/date_sub.rs

Lines changed: 76 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -20,15 +20,15 @@ use std::sync::Arc;
2020

2121
use arrow::array::ArrayRef;
2222
use arrow::compute;
23-
use arrow::datatypes::{DataType, Date32Type};
23+
use arrow::datatypes::{DataType, Date32Type, Field, FieldRef};
2424
use arrow::error::ArrowError;
2525
use datafusion_common::cast::{
2626
as_date32_array, as_int16_array, as_int32_array, as_int8_array,
2727
};
2828
use datafusion_common::{internal_err, Result};
2929
use datafusion_expr::{
30-
ColumnarValue, ScalarFunctionArgs, ScalarUDFImpl, Signature, TypeSignature,
31-
Volatility,
30+
ColumnarValue, ReturnFieldArgs, ScalarFunctionArgs, ScalarUDFImpl, Signature,
31+
TypeSignature, Volatility,
3232
};
3333
use datafusion_functions::utils::make_scalar_function;
3434

@@ -72,7 +72,21 @@ impl ScalarUDFImpl for SparkDateSub {
7272
}
7373

7474
fn return_type(&self, _arg_types: &[DataType]) -> Result<DataType> {
75-
Ok(DataType::Date32)
75+
internal_err!("return_field_from_args should be used instead")
76+
}
77+
78+
fn return_field_from_args(&self, args: ReturnFieldArgs) -> Result<FieldRef> {
79+
let nullable = args.arg_fields.iter().any(|f| f.is_nullable())
80+
|| args
81+
.scalar_arguments
82+
.iter()
83+
.any(|arg| matches!(arg, Some(sv) if sv.is_null()));
84+
85+
Ok(Arc::new(Field::new(
86+
self.name(),
87+
DataType::Date32,
88+
nullable,
89+
)))
7690
}
7791

7892
fn invoke_with_args(&self, args: ScalarFunctionArgs) -> Result<ColumnarValue> {
@@ -134,3 +148,61 @@ fn spark_date_sub(args: &[ArrayRef]) -> Result<ArrayRef> {
134148
};
135149
Ok(Arc::new(result))
136150
}
151+
152+
#[cfg(test)]
mod tests {
    use super::*;
    use datafusion_common::ScalarValue;

    /// Both arguments non-nullable and no NULL scalars: the result field
    /// must be reported as non-nullable Date32.
    #[test]
    fn test_date_sub_nullability_non_nullable_args() {
        let udf = SparkDateSub::new();
        let date_field = Arc::new(Field::new("d", DataType::Date32, false));
        let days_field = Arc::new(Field::new("n", DataType::Int32, false));

        let result = udf
            .return_field_from_args(ReturnFieldArgs {
                arg_fields: &[date_field, days_field],
                scalar_arguments: &[None, None],
            })
            .unwrap();

        assert!(!result.is_nullable());
        assert_eq!(result.data_type(), &DataType::Date32);
    }

    /// A single nullable argument field is enough to make the result
    /// nullable.
    #[test]
    fn test_date_sub_nullability_nullable_arg() {
        let udf = SparkDateSub::new();
        let date_field = Arc::new(Field::new("d", DataType::Date32, false));
        let nullable_days_field = Arc::new(Field::new("n", DataType::Int32, true));

        let result = udf
            .return_field_from_args(ReturnFieldArgs {
                arg_fields: &[date_field, nullable_days_field],
                scalar_arguments: &[None, None],
            })
            .unwrap();

        assert!(result.is_nullable());
        assert_eq!(result.data_type(), &DataType::Date32);
    }

    /// A NULL constant argument makes the result nullable even when every
    /// argument field is declared non-nullable.
    #[test]
    fn test_date_sub_nullability_scalar_null_argument() {
        let udf = SparkDateSub::new();
        let date_field = Arc::new(Field::new("d", DataType::Date32, false));
        let days_field = Arc::new(Field::new("n", DataType::Int32, false));
        let null_scalar = ScalarValue::Int32(None);

        let result = udf
            .return_field_from_args(ReturnFieldArgs {
                arg_fields: &[date_field, days_field],
                scalar_arguments: &[None, Some(&null_scalar)],
            })
            .unwrap();

        assert!(result.is_nullable());
        assert_eq!(result.data_type(), &DataType::Date32);
    }
}

0 commit comments

Comments
 (0)