@@ -63,7 +63,7 @@ class APLRRegressor
6363 void initialize (const std::vector<size_t > &prioritized_predictors_indexes, const std::vector<int > &monotonic_constraints);
6464 bool check_if_base_term_has_only_one_unique_value (size_t base_term);
6565 void add_term_to_terms_eligible_current (Term &term);
66- VectorXd calculate_neg_gradient_current ();
66+ VectorXd calculate_neg_gradient_current (const VectorXd &sample_weight_train );
6767 void execute_boosting_steps ();
6868 void execute_boosting_step (size_t boosting_step);
6969 std::vector<size_t > find_terms_eligible_current_indexes_for_a_base_term (size_t base_term);
@@ -580,7 +580,7 @@ void APLRRegressor::add_term_to_terms_eligible_current(Term &term)
580580 terms_eligible_current.push_back (term);
581581}
582582
583- VectorXd APLRRegressor::calculate_neg_gradient_current ()
583+ VectorXd APLRRegressor::calculate_neg_gradient_current (const VectorXd &sample_weight_train )
584584{
585585 VectorXd output;
586586 if (family=="gaussian")
@@ -609,7 +609,10 @@ VectorXd APLRRegressor::calculate_neg_gradient_current()
609609 }
610610 }
611611 else if (family=="mae")
612- output=(y_train.array () - predictions_current.array ()).sign ();
612+ {
613+ double mae{calculate_errors (y_train,predictions_current,sample_weight_train,"mae").mean ()};
614+ output=(y_train.array () - predictions_current.array ()).sign ()*mae;
615+ }
613616
614617 if (link_function!="identity")
615618 output=output.array ()*differentiate_predictions ().array ();
@@ -700,7 +703,7 @@ void APLRRegressor::update_linear_predictor_and_predictions()
700703
701704void APLRRegressor::update_gradient_and_errors ()
702705{
703- neg_gradient_current=calculate_neg_gradient_current ();
706+ neg_gradient_current=calculate_neg_gradient_current (sample_weight_train );
704707 neg_gradient_nullmodel_errors_sum=calculate_sum_error (calculate_errors (neg_gradient_current,linear_predictor_null_model,sample_weight_train,FAMILY_GAUSSIAN));
705708}
706709
0 commit comments