@@ -389,7 +389,13 @@ def task(i: int) -> float:
     Used for specifying feature types without constructing a dataframe. See
     :py:class:`DMatrix` for details.

-    max_cat_to_onehot : {Optional[int]}
+    feature_weights : Optional[ArrayLike]
+
+        Weight for each feature, defines the probability of each feature being selected
+        when colsample is being used. All values must be greater than 0, otherwise a
+        `ValueError` is thrown.
+
+    max_cat_to_onehot : Optional[int]

         .. versionadded:: 1.6.0

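For reference, a minimal sketch of how the new constructor-level parameter is meant to be used, on synthetic data (assumes a build that includes this change):

```python
import numpy as np
import xgboost as xgb

rng = np.random.default_rng(0)
X, y = rng.random((128, 4)), rng.random(128)

# One weight per feature: a higher weight means a higher probability of the
# feature being sampled whenever a colsample_by* parameter is active.
reg = xgb.XGBRegressor(
    n_estimators=10,
    colsample_bynode=0.5,
    feature_weights=np.array([1.0, 1.0, 4.0, 4.0]),
)
reg.fit(X, y)
```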
@@ -607,7 +613,7 @@ def _wrap_evaluation_matrices(
     qid: Optional[Any],
     sample_weight: Optional[Any],
     base_margin: Optional[Any],
-    feature_weights: Optional[Any],
+    feature_weights: Optional[ArrayLike],
     eval_set: Optional[Sequence[Tuple[Any, Any]]],
     sample_weight_eval_set: Optional[Sequence[Any]],
     base_margin_eval_set: Optional[Sequence[Any]],
@@ -753,6 +759,7 @@ def __init__(
         validate_parameters: Optional[bool] = None,
         enable_categorical: bool = False,
         feature_types: Optional[FeatureTypes] = None,
+        feature_weights: Optional[ArrayLike] = None,
         max_cat_to_onehot: Optional[int] = None,
         max_cat_threshold: Optional[int] = None,
         multi_strategy: Optional[str] = None,
@@ -799,6 +806,7 @@ def __init__(
         self.validate_parameters = validate_parameters
         self.enable_categorical = enable_categorical
         self.feature_types = feature_types
+        self.feature_weights = feature_weights
         self.max_cat_to_onehot = max_cat_to_onehot
         self.max_cat_threshold = max_cat_threshold
         self.multi_strategy = multi_strategy
@@ -895,6 +903,7 @@ def _wrapper_params(self) -> Set[str]:
             "early_stopping_rounds",
             "callbacks",
             "feature_types",
+            "feature_weights",
         }
         return wrapper_specific

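Adding the name to `_wrapper_params` keeps it out of the native booster configuration. A quick check of the expected behaviour (assumes a build with this change):

```python
import xgboost as xgb

reg = xgb.XGBRegressor(feature_weights=[1.0, 2.0])
# Wrapper-specific arguments are consumed by the sklearn wrapper and are not
# forwarded to the native booster as training parameters.
assert "feature_weights" not in reg.get_xgb_params()
```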
@@ -1065,10 +1074,12 @@ def _configure_fit(
         self,
         booster: Optional[Union[Booster, "XGBModel", str]],
         params: Dict[str, Any],
+        feature_weights: Optional[ArrayLike],
     ) -> Tuple[
         Optional[Union[Booster, str, "XGBModel"]],
         Optional[Metric],
         Dict[str, Any],
+        Optional[ArrayLike],
     ]:
         """Configure parameters for :py:meth:`fit`."""
         if isinstance(booster, XGBModel):
@@ -1101,13 +1112,23 @@ def _duplicated(parameter: str) -> None:
         else:
             params.update({"eval_metric": self.eval_metric})

+        if feature_weights is not None:
+            _deprecated("feature_weights")
+        if feature_weights is not None and self.feature_weights is not None:
+            _duplicated("feature_weights")
+        feature_weights = (
+            self.feature_weights
+            if self.feature_weights is not None
+            else feature_weights
+        )
+
         tree_method = params.get("tree_method", None)
         if self.enable_categorical and tree_method == "exact":
             raise ValueError(
                 "Experimental support for categorical data is not implemented for"
                 " current tree method yet."
             )
-        return model, metric, params
+        return model, metric, params, feature_weights

     def _create_dmatrix(self, ref: Optional[DMatrix], **kwargs: Any) -> DMatrix:
         # Use `QuantileDMatrix` to save memory.
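The block above encodes the precedence between the constructor argument and the now-deprecated `fit` keyword. A sketch of the resulting behaviour on toy data (assumes a build with this change; `_deprecated` is taken to emit a warning rather than raise):

```python
import numpy as np
import xgboost as xgb

rng = np.random.default_rng(0)
X, y = rng.random((64, 3)), rng.random(64)
fw = [1.0, 1.0, 3.0]

xgb.XGBRegressor(n_estimators=5, feature_weights=fw).fit(X, y)  # preferred, no warning
xgb.XGBRegressor(n_estimators=5).fit(X, y, feature_weights=fw)  # still works, but warns
# Supplying the argument in both places triggers _duplicated(), which raises.
```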
@@ -1184,12 +1205,19 @@ def fit(
             A list of the form [M_1, M_2, ..., M_n], where each M_i is an array like
             object storing base margin for the i-th validation set.
         feature_weights :
-            Weight for each feature, defines the probability of each feature being
-            selected when colsample is being used. All values must be greater than 0,
-            otherwise a `ValueError` is thrown.
+
+            .. deprecated:: 3.0.0
+
+            Use `feature_weights` in :py:meth:`__init__` or :py:meth:`set_params`
+            instead.

         """
         with config_context(verbosity=self.verbosity):
+            params = self.get_xgb_params()
+            model, metric, params, feature_weights = self._configure_fit(
+                xgb_model, params, feature_weights
+            )
+
             evals_result: TrainingCallback.EvalsLog = {}
             train_dmatrix, evals = _wrap_evaluation_matrices(
                 missing=self.missing,
@@ -1209,15 +1237,13 @@ def fit(
                 enable_categorical=self.enable_categorical,
                 feature_types=self.feature_types,
             )
-            params = self.get_xgb_params()

             if callable(self.objective):
                 obj: Optional[Objective] = _objective_decorator(self.objective)
                 params["objective"] = "reg:squarederror"
             else:
                 obj = None

-            model, metric, params = self._configure_fit(xgb_model, params)
             self._Booster = train(
                 params,
                 train_dmatrix,
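Note the reordering in this hunk: `get_xgb_params` and `_configure_fit` now run before `_wrap_evaluation_matrices`, because the resolved feature weights are attached to the `DMatrix` when it is constructed. The underlying mechanism is the existing `DMatrix` setter, shown below on synthetic data:

```python
import numpy as np
import xgboost as xgb

rng = np.random.default_rng(0)
X, y = rng.random((32, 2)), rng.random(32)

dtrain = xgb.DMatrix(X, label=y)
# Feature weights live on the DMatrix itself, which is why they must be
# resolved before the training matrix is built.
dtrain.set_info(feature_weights=np.array([1.0, 2.0]))
```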
@@ -1631,7 +1657,9 @@ def fit(
             params["objective"] = "multi:softprob"
             params["num_class"] = self.n_classes_

-            model, metric, params = self._configure_fit(xgb_model, params)
+            model, metric, params, feature_weights = self._configure_fit(
+                xgb_model, params, feature_weights
+            )
             train_dmatrix, evals = _wrap_evaluation_matrices(
                 missing=self.missing,
                 X=X,
@@ -2148,8 +2176,9 @@ def fit(
             evals_result: TrainingCallback.EvalsLog = {}
             params = self.get_xgb_params()

-            model, metric, params = self._configure_fit(xgb_model, params)
-
+            model, metric, params, feature_weights = self._configure_fit(
+                xgb_model, params, feature_weights
+            )
             self._Booster = train(
                 params,
                 train_dmatrix,
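Taken together, the migration for existing callers is mechanical: move the keyword from `fit` to the estimator, e.g. via `set_params`. A hypothetical before/after sketch on synthetic data:

```python
import numpy as np
import xgboost as xgb

rng = np.random.default_rng(0)
X = rng.random((100, 5))
y = rng.integers(0, 2, size=100)
fw = np.ones(5)

clf = xgb.XGBClassifier(n_estimators=5, colsample_bytree=0.8)
# Before: clf.fit(X, y, feature_weights=fw)   -- now deprecated
clf.set_params(feature_weights=fw)            # after: set on the estimator
clf.fit(X, y)
```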