Skip to content

Commit 0ed7282

Browse files
TST add unit testing for inf weights (#234)
Co-authored-by: mathurinm <[email protected]>
1 parent 64aed96 commit 0ed7282

File tree

3 files changed

+56
-4
lines changed

3 files changed

+56
-4
lines changed

celer/tests/test_lasso.py

Lines changed: 28 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88

99
import numpy as np
1010
from numpy.linalg import norm
11-
from numpy.testing import assert_allclose, assert_array_less
11+
from numpy.testing import assert_allclose, assert_array_less, assert_array_equal
1212
import pytest
1313

1414
from sklearn.exceptions import ConvergenceWarning
@@ -218,6 +218,33 @@ def test_weights_lasso():
218218
np.testing.assert_raises(ValueError, clf1.fit, X=X, y=y)
219219

220220

221+
@pytest.mark.parametrize("pb", ["lasso", "logreg"])
222+
def test_infinite_weights(pb):
223+
n_samples, n_features = 50, 100
224+
X, y = build_dataset(n_samples, n_features)
225+
if pb == "logreg":
226+
y = np.sign(y)
227+
228+
np.random.seed(1)
229+
weights = np.abs(np.random.randn(n_features))
230+
n_inf = n_features // 10
231+
inf_indices = np.random.choice(n_features, size=n_inf, replace=False)
232+
weights[inf_indices] = np.inf
233+
234+
alpha = norm(X.T @ y / weights, ord=np.inf) / n_samples / 100
235+
236+
tol = 1e-8
237+
_, coefs, dual_gaps = celer_path(
238+
X, y, pb=pb, alphas=[alpha], weights=weights, tol=tol)
239+
240+
if pb == "logreg":
241+
assert_array_less(dual_gaps[0], tol * n_samples * np.log(2))
242+
else:
243+
assert_array_less(dual_gaps[0], tol * norm(y) ** 2 / 2.)
244+
245+
assert_array_equal(coefs[inf_indices], 0)
246+
247+
221248
def test_zero_iter():
222249
X, y = build_dataset(n_samples=30, n_features=50)
223250

celer/tests/test_logreg.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,10 @@
11
# Author: Mathurin Massias <[email protected]>
22
# License: BSD 3 clause
3-
43
import pytest
54
import numpy as np
65
from numpy.linalg import norm
76

8-
from numpy.testing import assert_allclose, assert_array_less
7+
from numpy.testing import assert_allclose, assert_array_less, assert_array_equal
98
from sklearn.linear_model._logistic import _logistic_regression_path
109
from sklearn.utils.estimator_checks import check_estimator
1110
from sklearn.linear_model import LogisticRegression as sklearn_Logreg

celer/tests/test_mtl.py

Lines changed: 27 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,10 @@
11
import pytest
2+
import warnings
23
import itertools
4+
35
import numpy as np
46
from numpy.linalg import norm
5-
from numpy.testing import assert_allclose, assert_array_less
7+
from numpy.testing import assert_allclose, assert_array_less, assert_array_equal
68

79
from sklearn.utils.estimator_checks import check_estimator
810
from sklearn.linear_model import MultiTaskLassoCV as sklearn_MultiTaskLassoCV
@@ -241,5 +243,29 @@ def test_check_weights():
241243
np.testing.assert_raises(ValueError, clf.fit, X=X, y=y)
242244

243245

246+
def test_infinite_weights_group():
247+
n_samples, n_features = 50, 100
248+
X, y = build_dataset(n_samples, n_features)
249+
250+
np.random.seed(1)
251+
group_size = 5
252+
weights = np.abs(np.random.randn(n_features // group_size))
253+
n_inf = 3
254+
inf_indices = np.random.choice(
255+
n_features // group_size, size=n_inf, replace=False)
256+
weights[inf_indices] = np.inf
257+
alpha_max = np.max(
258+
norm((X.T @ y).reshape(-1, group_size), 2, axis=1)
259+
) / n_samples
260+
261+
clf = GroupLasso(
262+
alpha=alpha_max / 100., weights=weights, groups=group_size, tol=1e-8
263+
).fit(X, y)
264+
265+
assert_array_less(clf.dual_gap_, clf.tol * norm(y) ** 2 / 2)
266+
assert_array_equal(
267+
norm(clf.coef_.reshape(-1, group_size), axis=1)[inf_indices], 0)
268+
269+
244270
if __name__ == "__main__":
245271
pass

0 commit comments

Comments (0)