
Commit 2c6f6d3

fix: derive custom nullable for spark make_dt_interval (#19236)
## Which issue does this PR close?

- Closes #19154
- Part of #19144

## What changes are included in this PR?

- Spark `make_dt_interval` now derives its return field (including nullability) via `return_field_from_args`.

## Are these changes tested?

- All original tests pass.
- Added a new test covering the nullability check.
1 parent 18e4a0c commit 2c6f6d3
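
Context for the change (not part of the original commit message): DataFusion's `ScalarUDFImpl::return_type` can only report a `DataType`, whereas `return_field_from_args` returns a full `FieldRef`, so the function can derive the output's nullability from the argument fields and from any literal arguments visible at plan time. Below is a minimal sketch of the rule this commit implements, written as a free function for clarity; the free-function shape and the hard-coded field name are illustrative only, the real code lives in the `ScalarUDFImpl` impl shown in the diff.

```rust
use std::sync::Arc;

use arrow::datatypes::{DataType, Field, FieldRef, TimeUnit};
use datafusion_common::{Result, ScalarValue};

// Illustrative sketch (not the committed code): the nullability rule that the
// new `return_field_from_args` implementation encodes. `arg_fields` are the
// input fields (days, hours, mins, secs); `secs_scalar` is the literal value
// of `secs`, if known at plan time. The hard-coded field name stands in for
// the `self.name()` call used in the real impl.
fn make_dt_interval_return_field(
    arg_fields: &[Arc<Field>],
    secs_scalar: Option<&ScalarValue>,
) -> Result<FieldRef> {
    // If the literal `secs` is NaN or +/- infinity, the output field is
    // marked nullable.
    let non_finite_secs = matches!(
        secs_scalar,
        Some(ScalarValue::Float64(Some(v))) if !v.is_finite()
    ) || matches!(
        secs_scalar,
        Some(ScalarValue::Float32(Some(v))) if !v.is_finite()
    );

    // Otherwise the output is nullable only if any input field is nullable.
    let nullable = non_finite_secs || arg_fields.iter().any(|f| f.is_nullable());

    Ok(Arc::new(Field::new(
        "make_dt_interval",
        DataType::Duration(TimeUnit::Microsecond),
        nullable,
    )))
}
```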

File tree: 1 file changed (+83, -8 lines)


datafusion/spark/src/function/datetime/make_dt_interval.rs

Lines changed: 83 additions & 8 deletions
@@ -22,12 +22,14 @@ use arrow::array::{
     Array, ArrayRef, AsArray, DurationMicrosecondBuilder, PrimitiveArray,
 };
 use arrow::datatypes::TimeUnit::Microsecond;
-use arrow::datatypes::{DataType, Float64Type, Int32Type};
+use arrow::datatypes::{DataType, Field, FieldRef, Float64Type, Int32Type};
 use datafusion_common::types::{logical_float64, logical_int32, NativeType};
-use datafusion_common::{plan_datafusion_err, DataFusionError, Result, ScalarValue};
+use datafusion_common::{
+    internal_err, plan_datafusion_err, DataFusionError, Result, ScalarValue,
+};
 use datafusion_expr::{
-    Coercion, ColumnarValue, ScalarFunctionArgs, ScalarUDFImpl, Signature, TypeSignature,
-    TypeSignatureClass, Volatility,
+    Coercion, ColumnarValue, ReturnFieldArgs, ScalarFunctionArgs, ScalarUDFImpl,
+    Signature, TypeSignature, TypeSignatureClass, Volatility,
 };
 use datafusion_functions::utils::make_scalar_function;

@@ -99,7 +101,28 @@ impl ScalarUDFImpl for SparkMakeDtInterval {
     ///
     /// [Sail compatibility doc]: https://github.com/lakehq/sail/blob/dc5368daa24d40a7758a299e1ba8fc985cb29108/docs/guide/dataframe/data-types/compatibility.md?plain=1#L260
     fn return_type(&self, _arg_types: &[DataType]) -> Result<DataType> {
-        Ok(DataType::Duration(Microsecond))
+        internal_err!("return_field_from_args should be used instead")
+    }
+
+    fn return_field_from_args(&self, args: ReturnFieldArgs) -> Result<FieldRef> {
+        let has_non_finite_secs = args
+            .scalar_arguments
+            .get(3)
+            .and_then(|arg| {
+                arg.map(|scalar| match scalar {
+                    ScalarValue::Float64(Some(v)) => !v.is_finite(),
+                    ScalarValue::Float32(Some(v)) => !v.is_finite(),
+                    _ => false,
+                })
+            })
+            .unwrap_or(false);
+        let nullable =
+            has_non_finite_secs || args.arg_fields.iter().any(|f| f.is_nullable());
+        Ok(Arc::new(Field::new(
+            self.name(),
+            DataType::Duration(Microsecond),
+            nullable,
+        )))
     }

     fn invoke_with_args(&self, args: ScalarFunctionArgs) -> Result<ColumnarValue> {
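
Note (added for context, not part of the commit): once `return_field_from_args` is overridden, DataFusion resolves the function's output field through it, so `return_type` is not expected to be called during planning; returning `internal_err!` there turns any unexpected call into a loud internal error rather than a silently mis-derived field.
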
@@ -225,10 +248,9 @@ mod tests {

     use arrow::array::{DurationMicrosecondArray, Float64Array, Int32Array};
     use arrow::datatypes::DataType::Duration;
-    use arrow::datatypes::Field;
-    use arrow::datatypes::TimeUnit::Microsecond;
+    use arrow::datatypes::{DataType, Field, TimeUnit::Microsecond};
     use datafusion_common::{internal_datafusion_err, DataFusionError, Result};
-    use datafusion_expr::{ColumnarValue, ScalarFunctionArgs};
+    use datafusion_expr::{ColumnarValue, ReturnFieldArgs, ScalarFunctionArgs};

     use super::*;

@@ -292,6 +314,59 @@ mod tests {
         Ok(())
     }

+    #[test]
+    fn return_field_respects_nullability() -> Result<()> {
+        let udf = SparkMakeDtInterval::new();
+
+        // All nullable inputs -> nullable output
+        let arg_fields = vec![
+            Arc::new(Field::new("days", DataType::Int32, true)),
+            Arc::new(Field::new("hours", DataType::Int32, true)),
+            Arc::new(Field::new("mins", DataType::Int32, true)),
+            Arc::new(Field::new("secs", DataType::Float64, true)),
+        ];
+
+        let out = udf.return_field_from_args(ReturnFieldArgs {
+            arg_fields: &arg_fields,
+            scalar_arguments: &[None, None, None, None],
+        })?;
+        assert!(out.is_nullable());
+        assert_eq!(out.data_type(), &Duration(Microsecond));
+
+        // Non-nullable inputs -> non-nullable output
+        let non_nullable_arg_fields = vec![
+            Arc::new(Field::new("days", DataType::Int32, false)),
+            Arc::new(Field::new("hours", DataType::Int32, false)),
+            Arc::new(Field::new("mins", DataType::Int32, false)),
+            Arc::new(Field::new("secs", DataType::Float64, false)),
+        ];
+
+        let out = udf.return_field_from_args(ReturnFieldArgs {
+            arg_fields: &non_nullable_arg_fields,
+            scalar_arguments: &[None, None, None, None],
+        })?;
+        assert!(!out.is_nullable());
+
+        // Non-finite secs scalar should force nullable even if fields are non-nullable
+        let scalar_values =
+            [None, None, None, Some(ScalarValue::Float64(Some(f64::NAN)))];
+        let scalar_refs = scalar_values.iter().map(|v| v.as_ref()).collect::<Vec<_>>();
+        let out = udf.return_field_from_args(ReturnFieldArgs {
+            arg_fields: &non_nullable_arg_fields,
+            scalar_arguments: &scalar_refs,
+        })?;
+        assert!(out.is_nullable());
+
+        // Zero-arg call (defaults) should also be non-nullable
+        let out = udf.return_field_from_args(ReturnFieldArgs {
+            arg_fields: &[],
+            scalar_arguments: &[],
+        })?;
+        assert!(!out.is_nullable());
+
+        Ok(())
+    }
+
     #[test]
     fn error_months_overflow_should_be_null() -> Result<()> {
         // months = year*12 + month → NULL

0 commit comments
