@@ -64,7 +64,7 @@ use datafusion::{
 use datafusion_comet_spark_expr::{
     create_comet_physical_fun, create_comet_physical_fun_with_eval_mode, create_modulo_expr,
     create_negate_expr, BinaryOutputStyle, BloomFilterAgg, BloomFilterMightContain, EvalMode,
-    SparkHour, SparkMinute, SparkSecond,
+    SparkHour, SparkMinute, SparkSecond, SumInteger,
 };
 
 use crate::execution::operators::ExecutionError::GeneralError;
@@ -1875,6 +1875,17 @@ impl PhysicalPlanner {
                     let func = AggregateUDF::new_from_impl(SumDecimal::try_new(datatype)?);
                     AggregateExprBuilder::new(Arc::new(func), vec![child])
                 }
+                DataType::Int8 | DataType::Int16 | DataType::Int32 | DataType::Int64 => {
+                    // let eval_mode = from_protobuf_eval_mode(expr.eval_mode)?;
+                    let eval_mode = if expr.fail_on_error {
+                        EvalMode::Ansi
+                    } else {
+                        EvalMode::Legacy
+                    };
+                    let func =
+                        AggregateUDF::new_from_impl(SumInteger::try_new(datatype, eval_mode)?);
+                    AggregateExprBuilder::new(Arc::new(func), vec![child])
+                }
                 _ => {
                     // cast to the result data type of SUM if necessary, we should not expect
                     // a cast failure since it should have already been checked at Spark side
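
The new `Int8`/`Int16`/`Int32`/`Int64` arm maps Spark's `fail_on_error` flag to `EvalMode::Ansi` or `EvalMode::Legacy` before building the `SumInteger` aggregate UDF. As a rough illustration of what that choice implies for an integral SUM, the standalone sketch below (plain Rust, not Comet code; the function names and error text are invented for illustration) contrasts ANSI-style checked addition, which surfaces overflow as an error, with legacy-style wrapping addition on `i64`.

```rust
// Standalone sketch (not part of this commit) of the overflow semantics the
// eval-mode mapping above chooses between for integral SUM: ANSI mode is
// expected to error on i64 overflow, while legacy mode tolerates wrap-around.
fn sum_legacy(values: &[i64]) -> i64 {
    // Legacy / non-ANSI behaviour: overflow wraps silently.
    values.iter().fold(0i64, |acc, v| acc.wrapping_add(*v))
}

fn sum_ansi(values: &[i64]) -> Result<i64, String> {
    // ANSI behaviour: overflow surfaces as an error.
    // The error text is illustrative, not Comet's actual message.
    values.iter().try_fold(0i64, |acc, v| {
        acc.checked_add(*v)
            .ok_or_else(|| "integer overflow in SUM".to_string())
    })
}

fn main() {
    let values = [i64::MAX, 1];
    println!("{}", sum_legacy(&values)); // wraps around to i64::MIN
    println!("{:?}", sum_ansi(&values)); // Err("integer overflow in SUM")
}
```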