
Commit 6d108d1

use larger bfgs memory (#2752)
1 parent 1790f52 commit 6d108d1


daal4py/sklearn/linear_model/logistic_path.py

Lines changed: 6 additions & 0 deletions
@@ -249,6 +249,11 @@ def __logistic_regression_path(
         iprint = [-1, 50, 1, 100, 101][
             np.searchsorted(np.array([0, 1, 2, 3]), verbose)
         ]
+        # Note: this uses more correction pairs than the implementation in scikit-learn,
+        # which means better approximation of the Hessian at the expense of slower updates.
+        # This is beneficial for high-dimensional convex problems without bound constraints
+        # like the logistic regression being fitted here. For larger problems with sparse
+        # data (currently not supported), it might benefit from increasing the number further.
         opt_res = optimize.minimize(
             func,
             w0,
@@ -257,6 +262,7 @@ def __logistic_regression_path(
             args=extra_args,
             options={
                 "maxiter": max_iter,
+                "maxcor": 50,
                 "maxls": 50,
                 "gtol": tol,
                 "ftol": 64 * np.finfo(float).eps,

0 commit comments
