
Commit 9291abb

cleaner code

initialize() and check_if_base_term_has_only_one_unique_value() now read the X_train member populated by define_training_and_validation_sets() instead of taking the design matrix as a parameter, and validate_input_to_fit() now requires X and y to have at least two rows.
1 parent 325b54f commit 9291abb


1 file changed: +13 -13 lines changed


cpp/APLRRegressor.h

Lines changed: 13 additions & 13 deletions
@@ -49,8 +49,8 @@ class APLRRegressor
     void validate_input_to_fit(const MatrixXd &X,const VectorXd &y,const VectorXd &sample_weight,const std::vector<std::string> &X_names, const std::vector<size_t> &validation_set_indexes);
     void throw_error_if_validation_set_indexes_has_invalid_indexes(const VectorXd &y, const std::vector<size_t> &validation_set_indexes);
     void define_training_and_validation_sets(const MatrixXd &X,const VectorXd &y,const VectorXd &sample_weight, const std::vector<size_t> &validation_set_indexes);
-    void initialize(const MatrixXd &X);
-    bool check_if_base_term_has_only_one_unique_value(const MatrixXd &X, size_t base_term);
+    void initialize();
+    bool check_if_base_term_has_only_one_unique_value(size_t base_term);
     void add_term_to_terms_eligible_current(Term &term);
     VectorXd calculate_neg_gradient_current(const VectorXd &y,const VectorXd &predictions_current);
     void execute_boosting_steps();
@@ -165,7 +165,7 @@ void APLRRegressor::fit(const MatrixXd &X,const VectorXd &y,const VectorXd &samp
 {
     validate_input_to_fit(X,y,sample_weight,X_names,validation_set_indexes);
     define_training_and_validation_sets(X,y,sample_weight,validation_set_indexes);
-    initialize(X);
+    initialize();
     execute_boosting_steps();
     update_coefficients_for_all_steps();
     print_final_summary();
@@ -178,7 +178,7 @@ void APLRRegressor::fit(const MatrixXd &X,const VectorXd &y,const VectorXd &samp
 void APLRRegressor::validate_input_to_fit(const MatrixXd &X,const VectorXd &y,const VectorXd &sample_weight,const std::vector<std::string> &X_names, const std::vector<size_t> &validation_set_indexes)
 {
     if(X.rows()!=y.size()) throw std::runtime_error("X and y must have the same number of rows.");
-    if(X.rows()==0) throw std::runtime_error("X and y cannot have zero rows.");
+    if(X.rows()<2) throw std::runtime_error("X and y cannot have less than two rows.");
     if(sample_weight.size()>0 && sample_weight.size()!=y.size()) throw std::runtime_error("sample_weight must have 0 or as many rows as X and y.");
     if(X_names.size()>0 && X_names.size()!=static_cast<size_t>(X.cols())) throw std::runtime_error("X_names must have as many columns as X.");
     throw_error_if_matrix_has_nan_or_infinite_elements(X, "X");
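The tightened row check in this hunk goes together with the uniqueness test changed further down in the diff: with a single observation, every base term trivially has only one unique value, so no base term would be added to terms_eligible_current. A minimal, standalone sketch of that degenerate case, assuming a hypothetical free function has_only_one_unique_value that simplifies the member check (exact comparison instead of check_if_approximately_equal, a local matrix instead of X_train):

#include <Eigen/Dense>
#include <cassert>
#include <cstddef>

// Hypothetical, simplified rendering of the uniqueness check shown later in
// this diff: exact comparison instead of check_if_approximately_equal, and a
// local matrix argument instead of the X_train member.
bool has_only_one_unique_value(const Eigen::MatrixXd &X, std::size_t base_term)
{
    std::size_t rows{static_cast<std::size_t>(X.rows())};
    if (rows == 1) return true; // a single observation always counts as one unique value
    for (std::size_t i = 1; i < rows; ++i)
        if (X.col(base_term)[i] != X.col(base_term)[i - 1]) return false;
    return true;
}

int main()
{
    Eigen::MatrixXd one_row(1, 3);
    one_row << 1.0, 2.0, 3.0;
    // The degenerate input that the stricter X.rows()<2 check now rejects up front.
    assert(has_only_one_unique_value(one_row, 0));
    return 0;
}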
@@ -271,22 +271,22 @@ void APLRRegressor::define_training_and_validation_sets(const MatrixXd &X,const
     }
 }

-void APLRRegressor::initialize(const MatrixXd &X)
+void APLRRegressor::initialize()
 {
-    number_of_base_terms=static_cast<size_t>(X.cols());
+    number_of_base_terms=static_cast<size_t>(X_train.cols());

-    terms.reserve(X.cols()*reserved_terms_times_num_x);
+    terms.reserve(X_train.cols()*reserved_terms_times_num_x);
     terms.clear();

     intercept=0;
     intercept_steps=VectorXd::Constant(m,0);

     null_predictions=VectorXd::Constant(y_train.size(),0);

-    terms_eligible_current.reserve(X.cols()*reserved_terms_times_num_x);
-    for (size_t i = 0; i < static_cast<size_t>(X.cols()); ++i)
+    terms_eligible_current.reserve(X_train.cols()*reserved_terms_times_num_x);
+    for (size_t i = 0; i < static_cast<size_t>(X_train.cols()); ++i)
     {
-        bool term_has_one_unique_value{check_if_base_term_has_only_one_unique_value(X, i)};
+        bool term_has_one_unique_value{check_if_base_term_has_only_one_unique_value(i)};
         if(!term_has_one_unique_value)
         {
             Term copy_of_base_term{Term(i)};
@@ -304,15 +304,15 @@ void APLRRegressor::initialize(const MatrixXd &X)
     neg_gradient_nullmodel_errors_sum=neg_gradient_nullmodel_errors.sum();
 }

-bool APLRRegressor::check_if_base_term_has_only_one_unique_value(const MatrixXd &X, size_t base_term)
+bool APLRRegressor::check_if_base_term_has_only_one_unique_value(size_t base_term)
 {
-    size_t rows{static_cast<size_t>(X.rows())};
+    size_t rows{static_cast<size_t>(X_train.rows())};
     if(rows==1) return true;

     bool term_has_one_unique_value{true};
     for (size_t i = 1; i < rows; ++i)
     {
-        bool observation_is_equal_to_previous{check_if_approximately_equal(X.col(base_term)[i], X.col(base_term)[i-1])};
+        bool observation_is_equal_to_previous{check_if_approximately_equal(X_train.col(base_term)[i], X_train.col(base_term)[i-1])};
         if(!observation_is_equal_to_previous)
         {
             term_has_one_unique_value=false;
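
Taken together, the hunks above apply one refactoring pattern: the training matrix is stored once as the X_train member (by define_training_and_validation_sets()), and later steps read that member rather than having X threaded through their parameter lists. A minimal, standalone sketch of the pattern, assuming a hypothetical Model class with simplified define_training_set(), initialize() and fit() stand-ins (not the actual APLR API):

#include <Eigen/Dense>
#include <cstddef>
#include <stdexcept>

using Eigen::MatrixXd;

// Hypothetical, simplified stand-in for the pattern in this commit: the
// training matrix is stored once as a member, and later steps read the
// member instead of taking the matrix as a parameter.
class Model
{
    MatrixXd X_train;                  // populated once by define_training_set()
    std::size_t number_of_base_terms{0};

    void define_training_set(const MatrixXd &X) { X_train = X; }

    // After the refactor, initialization needs no arguments: it reads X_train.
    void initialize() { number_of_base_terms = static_cast<std::size_t>(X_train.cols()); }

public:
    void fit(const MatrixXd &X)
    {
        if (X.rows() < 2) throw std::runtime_error("X cannot have less than two rows.");
        define_training_set(X);        // must run before initialize()
        initialize();                  // reads the X_train set in the previous step
    }
};

int main()
{
    MatrixXd X(3, 2);
    X << 1.0, 2.0,
         3.0, 4.0,
         5.0, 6.0;
    Model m;
    m.fit(X);                          // X_train is populated before initialize() uses it
    return 0;
}

The design consequence is an ordering dependency: initialize() and check_if_base_term_has_only_one_unique_value() are only valid after define_training_and_validation_sets() has populated X_train, which the call sequence in fit() (second hunk) preserves.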
