|
8 | 8 |
|
9 | 9 | import numpy as np |
10 | 10 | from numpy.linalg import norm |
11 | | -from numpy.testing import assert_allclose, assert_array_less |
| 11 | +from numpy.testing import assert_allclose, assert_array_less, assert_array_equal |
12 | 12 | import pytest |
13 | 13 |
|
14 | 14 | from sklearn.exceptions import ConvergenceWarning |
@@ -218,6 +218,33 @@ def test_weights_lasso(): |
218 | 218 | np.testing.assert_raises(ValueError, clf1.fit, X=X, y=y) |
219 | 219 |
|
220 | 220 |
|
@pytest.mark.parametrize("pb", ["lasso", "logreg"])
def test_infinite_weights(pb):
    """Features given an infinite penalty weight must be excluded.

    Solves one path point with a tenth of the features weighted ``np.inf``,
    checks that the solver converged (duality gap below the tolerance scaled
    for the problem type) and that the infinitely-penalized coefficients
    are exactly zero.
    """
    n_samples, n_features = 50, 100
    X, y = build_dataset(n_samples, n_features)
    if pb == "logreg":
        y = np.sign(y)

    # fixed seed so the weight pattern is reproducible across runs
    np.random.seed(1)
    weights = np.abs(np.random.randn(n_features))
    n_inf = n_features // 10
    inf_indices = np.random.choice(n_features, size=n_inf, replace=False)
    weights[inf_indices] = np.inf

    # small fraction of the (weighted) critical regularization strength
    alpha = norm(X.T @ y / weights, ord=np.inf) / n_samples / 100

    tol = 1e-8
    _, coefs, dual_gaps = celer_path(
        X, y, pb=pb, alphas=[alpha], weights=weights, tol=tol)

    # gap tolerance is scaled differently for logistic vs least-squares loss
    gap_scale = n_samples * np.log(2) if pb == "logreg" else norm(y) ** 2 / 2.
    assert_array_less(dual_gaps[0], tol * gap_scale)

    # infinitely penalized features must stay out of the model
    assert_array_equal(coefs[inf_indices], 0)
| 246 | + |
| 247 | + |
221 | 248 | def test_zero_iter(): |
222 | 249 | X, y = build_dataset(n_samples=30, n_features=50) |
223 | 250 |
|
|
0 commit comments