@@ -967,6 +967,12 @@ class SparseLogisticRegression(LinearClassifierMixin, SparseCoefMixin, BaseEstim
     alpha : float, default=1.0
         Regularization strength; must be a positive float.
 
+    l1_ratio : float, default=1.0
+        The ElasticNet mixing parameter, with ``0 <= l1_ratio <= 1``. For
+        ``l1_ratio = 0`` the penalty is an L2 penalty. For ``l1_ratio = 1`` it
+        is an L1 penalty. For ``0 < l1_ratio < 1``, the penalty is a
+        combination of L1 and L2.
+
     tol : float, optional
         Stopping criterion for the optimization.
 
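For reference, under the usual scikit-learn ElasticNet convention the mixed penalty described above evaluates as in the sketch below. The exact weighting (in particular the 1/2 factor on the L2 term) is an assumption about how the combined penalty is defined, not something this diff states:

    import numpy as np

    def elastic_net_penalty(w, alpha=1.0, l1_ratio=1.0):
        # Assumed ElasticNet weighting (scikit-learn convention); the 1/2
        # factor on the L2 term may differ from the library's definition.
        # l1_ratio=1.0 gives pure L1; l1_ratio=0.0 gives pure squared L2.
        return alpha * (l1_ratio * np.abs(w).sum()
                        + (1 - l1_ratio) / 2 * (w ** 2).sum())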
@@ -1003,10 +1009,11 @@ class SparseLogisticRegression(LinearClassifierMixin, SparseCoefMixin, BaseEstim
         Number of subproblems solved to reach the specified tolerance.
     """
 
-    def __init__(self, alpha=1.0, tol=1e-4, max_iter=20, max_epochs=1_000, verbose=0,
-                 fit_intercept=True, warm_start=False):
+    def __init__(self, alpha=1.0, l1_ratio=1.0, tol=1e-4, max_iter=20, max_epochs=1_000,
+                 verbose=0, fit_intercept=True, warm_start=False):
         super().__init__()
         self.alpha = alpha
+        self.l1_ratio = l1_ratio
         self.tol = tol
         self.max_iter = max_iter
         self.max_epochs = max_epochs
@@ -1035,7 +1042,8 @@ def fit(self, X, y):
             max_iter=self.max_iter, max_pn_iter=self.max_epochs, tol=self.tol,
             fit_intercept=self.fit_intercept, warm_start=self.warm_start,
             verbose=self.verbose)
-        return _glm_fit(X, y, self, Logistic(), L1(self.alpha), solver)
+        return _glm_fit(X, y, self, Logistic(), L1_plus_L2(self.alpha, self.l1_ratio),
+                        solver)
 
     def predict_proba(self, X):
         """Probability estimates.