Commit f62cdc8

Author: ZebinYang

skip grid search cv in build leaf if param_dict is empty; version 0.2.4

1 parent: dd87fd0
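
For context, here is a minimal sketch of the pattern the commit message describes. The names below (fit_leaf_estimator, base_estimator) and the use of scikit-learn's GridSearchCV are illustrative assumptions, not the package's internals: run a cross-validated grid search only when there is actually a grid to search, and fit the estimator directly when param_dict is empty.

    import numpy as np
    from sklearn.linear_model import LogisticRegression
    from sklearn.model_selection import GridSearchCV

    def fit_leaf_estimator(base_estimator, param_dict, x, y):
        # Non-empty grid: pick hyperparameters by 5-fold cross-validation.
        if param_dict:
            search = GridSearchCV(base_estimator, param_dict, cv=5, scoring="roc_auc")
            search.fit(x, y)
            return search.best_estimator_
        # Empty grid: nothing to tune, so skip the grid search entirely.
        return base_estimator.fit(x, y)

    # With an empty param_dict the grid search is bypassed and the estimator is fit as-is.
    rng = np.random.RandomState(0)
    x, y = rng.randn(100, 3), (rng.randn(100) > 0).astype(int)
    leaf_model = fit_leaf_estimator(LogisticRegression(), {}, x, y)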

File tree

3 files changed: +120 -34 lines changed

examples/demo.ipynb

Lines changed: 118 additions & 32 deletions
Large diffs are not rendered by default.

simtree/glmtree.py

Lines changed: 1 addition & 1 deletion
@@ -95,7 +95,7 @@ def build_leaf(self, sample_indice):
             best_estimator = LogisticRegressionCV(Cs=self.reg_lambda, penalty="l1", solver="liblinear", scoring="roc_auc",
                                                   cv=5, random_state=self.random_state)
         else:
-            best_estimator = LogisticRegression(alpha=self.reg_lambda[0], precompute=False, random_state=self.random_state)
+            best_estimator = LogisticRegression(C=self.reg_lambda[0], random_state=self.random_state)

         mx = self.x[sample_indice].mean(0)
         sx = self.x[sample_indice].std(0) + self.EPSILON
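
To see what the one-line fix does, here is a minimal standalone sketch of the branch (variable names mirror the hunk, but this is not the package's code): with several candidate values, LogisticRegressionCV selects one by cross-validation; with a single value, LogisticRegression is fit directly. The removed line passed alpha and precompute, which are Lasso-style arguments that LogisticRegression does not accept, so it would fail at instantiation; C is LogisticRegression's inverse regularization-strength parameter.

    from sklearn.linear_model import LogisticRegression, LogisticRegressionCV

    reg_lambda = [1.0]   # assumed single candidate value, for illustration only
    random_state = 0

    if len(reg_lambda) > 1:
        # Several candidates: let LogisticRegressionCV choose one by 5-fold CV.
        best_estimator = LogisticRegressionCV(Cs=reg_lambda, penalty="l1", solver="liblinear",
                                              scoring="roc_auc", cv=5, random_state=random_state)
    else:
        # Single candidate: skip cross-validation and fit this value directly.
        best_estimator = LogisticRegression(C=reg_lambda[0], random_state=random_state)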

simtree/simtree.py

Lines changed: 1 addition & 1 deletion
@@ -485,7 +485,7 @@ def build_leaf(self, sample_indice):

         param_size = len(self.reg_lambda)
         if param_size == 1:
-            best_estimator = SimRegressor(reg_lambda=[self.reg_lambda[0]], reg_gamma=self.reg_gamma, degree=self.degree,
+            best_estimator = SimClassifier(reg_lambda=[self.reg_lambda[0]], reg_gamma=self.reg_gamma, degree=self.degree,
                                           knot_num=self.knot_num, random_state=self.random_state)
             best_estimator.fit(self.x[sample_indice], self.y[sample_indice].ravel())
             predict_func = lambda x: best_estimator.decision_function(x)
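
A quick illustration of why the classification leaf needs SimClassifier rather than SimRegressor, using scikit-learn estimators as stand-ins (the SIM estimators themselves are not reproduced here): the context lines wire predict_func to decision_function, which scikit-learn classifiers expose but regressors do not.

    import numpy as np
    from sklearn.linear_model import Lasso, LogisticRegression

    rng = np.random.RandomState(0)
    x = rng.randn(50, 3)
    y = (x[:, 0] > 0).astype(int)

    clf = LogisticRegression().fit(x, y)
    predict_func = lambda x_new: clf.decision_function(x_new)  # classifiers expose decision_function
    print(predict_func(x[:5]))

    reg = Lasso(alpha=0.1).fit(x, y)
    # reg.decision_function(x[:5])  # AttributeError: a regressor has no decision_function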

0 commit comments
