
Commit 7e03e55

10.17.1
1 parent 4de2d31 commit 7e03e55

4 files changed: +32, -2 lines changed


API_REFERENCE_FOR_REGRESSION.md

Lines changed: 1 addition & 1 deletion
@@ -136,7 +136,7 @@ Specifies the (weighted) ridge penalty applied to the model. Positive values can
 If true, then a mean bias correction is applied to the model's intercept term. This can be useful for some loss functions, such as "huber", that can otherwise produce biased predictions. The correction is only applied for the "identity" and "log" link functions.
 
 #### faster_convergence (default = False)
-If true, then a scaling is applied to the negative gradient to speed up convergence. This should primarily be used when the algorithm otherwise converges too slowly. This is only applied for the "identity" and "log" link functions.
+If true, then a scaling is applied to the negative gradient to speed up convergence. This should primarily be used when the algorithm otherwise converges too slowly or prematurely. This is only applied for the "identity" and "log" link functions.
 This will not speed up the combination of "mse" loss with an "identity" link, as this combination is already optimized for speed within the algorithm. Furthermore, this option is not effective for all loss functions, such as "mae" and "quantile".
 
 ## Method: fit(X:FloatMatrix, y:FloatVector, sample_weight:FloatVector = np.empty(0), X_names:List[str] = [], cv_observations:IntMatrix = np.empty([0, 0]), prioritized_predictors_indexes:List[int] = [], monotonic_constraints:List[int] = [], group:FloatVector = np.empty(0), interaction_constraints:List[List[int]] = [], other_data:FloatMatrix = np.empty([0, 0]), predictor_learning_rates:List[float] = [], predictor_penalties_for_non_linearity:List[float] = [], predictor_penalties_for_interactions:List[float] = [], predictor_min_observations_in_split: List[int] = [])
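As a rough illustration of the documentation change above, here is a minimal Python sketch of enabling faster_convergence. Only faster_convergence itself and the fit() signature appear in this diff; the assumption that faster_convergence, loss_function and link_function are constructor arguments of the Python APLRRegressor wrapper is mine, so treat the call as illustrative rather than a verified API.

import numpy as np
from aplr import APLRRegressor  # assumed import path for the Python wrapper

# Synthetic regression data for the sketch.
rng = np.random.default_rng(0)
X = rng.normal(size=(500, 3))
y = X[:, 0] + 0.5 * X[:, 1] ** 2 + rng.normal(scale=0.1, size=500)

# "huber" loss with the "identity" link is one case where faster_convergence
# can help, per the documentation above. Constructor arguments are assumed.
model = APLRRegressor(
    loss_function="huber",
    link_function="identity",
    faster_convergence=True,
)

# fit() as documented above; only X and y are required.
model.fit(X, y)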

cpp/APLRRegressor.h

Lines changed: 30 additions & 0 deletions
@@ -196,6 +196,7 @@ class APLRRegressor
     void throw_error_if_m_is_invalid();
     bool model_has_not_been_trained();
     void throw_error_if_quantile_is_invalid();
+    void throw_error_if_validation_tuning_metric_is_invalid();
     std::vector<size_t> compute_relevant_term_indexes(const std::string &unique_term_affiliation);
     std::vector<double> compute_split_points(size_t predictor_index, const std::vector<size_t> &relevant_term_indexes);
     VectorXd compute_contribution_to_linear_predictor_from_specific_terms(const MatrixXd &X, const std::vector<size_t> &term_indexes,
@@ -471,6 +472,7 @@ void APLRRegressor::fit(const MatrixXd &X, const VectorXd &y, const VectorXd &sa
     throw_error_if_dispersion_parameter_is_invalid();
     throw_error_if_quantile_is_invalid();
     throw_error_if_m_is_invalid();
+    throw_error_if_validation_tuning_metric_is_invalid();
     validate_input_to_fit(X, y, sample_weight, X_names, cv_observations, prioritized_predictors_indexes, monotonic_constraints, group,
                           interaction_constraints, other_data, predictor_learning_rates, predictor_penalties_for_non_linearity,
                           predictor_penalties_for_interactions);
@@ -707,6 +709,34 @@ void APLRRegressor::throw_error_if_quantile_is_invalid()
     }
 }
 
+void APLRRegressor::throw_error_if_validation_tuning_metric_is_invalid()
+{
+    bool metric_exists{false};
+    if (validation_tuning_metric == "default")
+        metric_exists = true;
+    else if (validation_tuning_metric == "mse")
+        metric_exists = true;
+    else if (validation_tuning_metric == "mae")
+        metric_exists = true;
+    else if (validation_tuning_metric == "huber")
+        metric_exists = true;
+    else if (validation_tuning_metric == "negative_gini")
+        metric_exists = true;
+    else if (validation_tuning_metric == "group_mse")
+        metric_exists = true;
+    else if (validation_tuning_metric == "group_mse_by_prediction")
+        metric_exists = true;
+    else if (validation_tuning_metric == "neg_top_quantile_mean_response")
+        metric_exists = true;
+    else if (validation_tuning_metric == "bottom_quantile_mean_response")
+        metric_exists = true;
+    else if (validation_tuning_metric == "custom_function")
+        metric_exists = true;
+
+    if (!metric_exists)
+        throw std::runtime_error("validation_tuning_metric " + validation_tuning_metric + " is not available in APLR.");
+}
+
 void APLRRegressor::validate_input_to_fit(const MatrixXd &X, const VectorXd &y, const VectorXd &sample_weight,
                                           const std::vector<std::string> &X_names, const MatrixXi &cv_observations,
                                           const std::vector<size_t> &prioritized_predictors_indexes, const std::vector<int> &monotonic_constraints, const VectorXi &group,
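The new throw_error_if_validation_tuning_metric_is_invalid() check runs at the start of fit() and rejects any validation_tuning_metric outside the list above with a std::runtime_error, so a misspelled metric now fails fast instead of surfacing later during training. Below is a hedged sketch of what that looks like from the Python side; it assumes the wrapper forwards validation_tuning_metric as a constructor argument and that the C++ exception propagates as a Python exception through the binding layer.

import numpy as np
from aplr import APLRRegressor  # assumed Python wrapper around the C++ class

rng = np.random.default_rng(0)
X = rng.normal(size=(100, 2))
y = X[:, 0] + rng.normal(scale=0.1, size=100)

# "rmse" is not among the accepted values ("default", "mse", "mae", "huber",
# "negative_gini", "group_mse", "group_mse_by_prediction",
# "neg_top_quantile_mean_response", "bottom_quantile_mean_response",
# "custom_function"), so fit() should now raise immediately.
model = APLRRegressor(validation_tuning_metric="rmse")  # assumed constructor argument
try:
    model.fit(X, y)
except Exception as error:  # exact exception type depends on the binding layer
    print(error)  # expected message: validation_tuning_metric rmse is not available in APLR.

An equivalent check could also be written as a lookup in a static set of allowed names; the explicit if/else chain in the diff keeps the accepted values visible in one place without extra containers.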
Binary file not shown.

setup.py

Lines changed: 1 addition & 1 deletion
@@ -28,7 +28,7 @@
 
 setuptools.setup(
     name="aplr",
-    version="10.17.0",
+    version="10.17.1",
     description="Automatic Piecewise Linear Regression",
     ext_modules=[sfc_module],
     author="Mathias von Ottenbreit",
