@@ -389,7 +389,13 @@ def task(i: int) -> float:
389389 Used for specifying feature types without constructing a dataframe. See
390390 :py:class:`DMatrix` for details.
391391
392- max_cat_to_onehot : { Optional [int ]}
392+ feature_weights : Optional[ArrayLike]
393+
394+ Weight for each feature, defining the probability of each feature being selected
395+ when colsample is being used. All values must be greater than 0, otherwise a
396+ `ValueError` is raised.
397+
398+ max_cat_to_onehot : Optional[int]
393399
394400 .. versionadded:: 1.6.0
395401
@@ -607,7 +613,7 @@ def _wrap_evaluation_matrices(
607613 qid : Optional [Any ],
608614 sample_weight : Optional [Any ],
609615 base_margin : Optional [Any ],
610- feature_weights : Optional [Any ],
616+ feature_weights : Optional [ArrayLike ],
611617 eval_set : Optional [Sequence [Tuple [Any , Any ]]],
612618 sample_weight_eval_set : Optional [Sequence [Any ]],
613619 base_margin_eval_set : Optional [Sequence [Any ]],
@@ -753,6 +759,7 @@ def __init__(
753759 validate_parameters : Optional [bool ] = None ,
754760 enable_categorical : bool = False ,
755761 feature_types : Optional [FeatureTypes ] = None ,
762+ feature_weights : Optional [ArrayLike ] = None ,
756763 max_cat_to_onehot : Optional [int ] = None ,
757764 max_cat_threshold : Optional [int ] = None ,
758765 multi_strategy : Optional [str ] = None ,
@@ -799,6 +806,7 @@ def __init__(
799806 self .validate_parameters = validate_parameters
800807 self .enable_categorical = enable_categorical
801808 self .feature_types = feature_types
809+ self .feature_weights = feature_weights
802810 self .max_cat_to_onehot = max_cat_to_onehot
803811 self .max_cat_threshold = max_cat_threshold
804812 self .multi_strategy = multi_strategy
@@ -895,6 +903,7 @@ def _wrapper_params(self) -> Set[str]:
895903 "early_stopping_rounds" ,
896904 "callbacks" ,
897905 "feature_types" ,
906+ "feature_weights" ,
898907 }
899908 return wrapper_specific
900909
@@ -1065,10 +1074,12 @@ def _configure_fit(
10651074 self ,
10661075 booster : Optional [Union [Booster , "XGBModel" , str ]],
10671076 params : Dict [str , Any ],
1077+ feature_weights : Optional [ArrayLike ],
10681078 ) -> Tuple [
10691079 Optional [Union [Booster , str , "XGBModel" ]],
10701080 Optional [Metric ],
10711081 Dict [str , Any ],
1082+ Optional [ArrayLike ],
10721083 ]:
10731084 """Configure parameters for :py:meth:`fit`."""
10741085 if isinstance (booster , XGBModel ):
@@ -1101,13 +1112,23 @@ def _duplicated(parameter: str) -> None:
11011112 else :
11021113 params .update ({"eval_metric" : self .eval_metric })
11031114
1115+ if feature_weights is not None :
1116+ _deprecated ("feature_weights" )
1117+ if feature_weights is not None and self .feature_weights is not None :
1118+ _duplicated ("feature_weights" )
1119+ feature_weights = (
1120+ self .feature_weights
1121+ if self .feature_weights is not None
1122+ else feature_weights
1123+ )
1124+
11041125 tree_method = params .get ("tree_method" , None )
11051126 if self .enable_categorical and tree_method == "exact" :
11061127 raise ValueError (
11071128 "Experimental support for categorical data is not implemented for"
11081129 " current tree method yet."
11091130 )
1110- return model , metric , params
1131+ return model , metric , params , feature_weights
11111132
11121133 def _create_dmatrix (self , ref : Optional [DMatrix ], ** kwargs : Any ) -> DMatrix :
11131134 # Use `QuantileDMatrix` to save memory.
@@ -1184,12 +1205,19 @@ def fit(
11841205 A list of the form [M_1, M_2, ..., M_n], where each M_i is an array like
11851206 object storing base margin for the i-th validation set.
11861207 feature_weights :
1187- Weight for each feature, defines the probability of each feature being
1188- selected when colsample is being used. All values must be greater than 0,
1189- otherwise a `ValueError` is thrown.
1208+
1209+ .. deprecated:: 3.0.0
1210+
1211+ Use `feature_weights` in :py:meth:`__init__` or :py:meth:`set_params`
1212+ instead.
11901213
11911214 """
11921215 with config_context (verbosity = self .verbosity ):
1216+ params = self .get_xgb_params ()
1217+ model , metric , params , feature_weights = self ._configure_fit (
1218+ xgb_model , params , feature_weights
1219+ )
1220+
11931221 evals_result : TrainingCallback .EvalsLog = {}
11941222 train_dmatrix , evals = _wrap_evaluation_matrices (
11951223 missing = self .missing ,
@@ -1209,15 +1237,13 @@ def fit(
12091237 enable_categorical = self .enable_categorical ,
12101238 feature_types = self .feature_types ,
12111239 )
1212- params = self .get_xgb_params ()
12131240
12141241 if callable (self .objective ):
12151242 obj : Optional [Objective ] = _objective_decorator (self .objective )
12161243 params ["objective" ] = "reg:squarederror"
12171244 else :
12181245 obj = None
12191246
1220- model , metric , params = self ._configure_fit (xgb_model , params )
12211247 self ._Booster = train (
12221248 params ,
12231249 train_dmatrix ,
@@ -1631,7 +1657,9 @@ def fit(
16311657 params ["objective" ] = "multi:softprob"
16321658 params ["num_class" ] = self .n_classes_
16331659
1634- model , metric , params = self ._configure_fit (xgb_model , params )
1660+ model , metric , params , feature_weights = self ._configure_fit (
1661+ xgb_model , params , feature_weights
1662+ )
16351663 train_dmatrix , evals = _wrap_evaluation_matrices (
16361664 missing = self .missing ,
16371665 X = X ,
@@ -2148,8 +2176,9 @@ def fit(
21482176 evals_result : TrainingCallback .EvalsLog = {}
21492177 params = self .get_xgb_params ()
21502178
2151- model , metric , params = self ._configure_fit (xgb_model , params )
2152-
2179+ model , metric , params , feature_weights = self ._configure_fit (
2180+ xgb_model , params , feature_weights
2181+ )
21532182 self ._Booster = train (
21542183 params ,
21552184 train_dmatrix ,
0 commit comments