Skip to content

Commit ac01691

Browse files
authored
remove XGBoostObjectiveJSON (#2508)
1 parent 6be86f2 commit ac01691

File tree

2 files changed

+16
-24
lines changed

2 files changed

+16
-24
lines changed

pkg/attribute/attribute.go

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -372,9 +372,6 @@ var extractSymbolOnce sync.Once
372372
// OptimizerParamsDocs stores parameters and documents of optimizers
373373
var OptimizerParamsDocs map[string]map[string]string
374374

375-
// XGBoostObjectiveDocs stores options for xgboost objective
376-
var XGBoostObjectiveDocs map[string]string
377-
378375
// ExtractSymbol extracts parameter documents of Python modules from doc strings
379376
func ExtractSymbol(module ...string) {
380377
cmd := exec.Command("python", "-uc", fmt.Sprintf("__import__('symbol_extractor').print_param_doc('%s')", strings.Join(module, "', '")))
@@ -409,8 +406,5 @@ func init() {
409406
if err := json.Unmarshal([]byte(OptimizerParameterJSON), &OptimizerParamsDocs); err != nil {
410407
panic(err) // assertion
411408
}
412-
if err := json.Unmarshal([]byte(XGBoostObjectiveJSON), &XGBoostObjectiveDocs); err != nil {
413-
panic(err)
414-
}
415409
removeUnnecessaryParams()
416410
}

pkg/attribute/xgboost_objective_params.go

Lines changed: 16 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -13,23 +13,21 @@
1313

1414
package attribute
1515

16-
// XGBoostObjectiveJSON is xgboost objective param json extracted
16+
// XGBoostObjectiveDocs is xgboost objective parameter docs extracted
1717
// from https://xgboost.readthedocs.io/en/latest/parameter.html
18-
const XGBoostObjectiveJSON = `
19-
{
20-
"binary:hinge": "hinge loss for binary classification. This makes predictions of 0 or 1, rather than producing probabilities.",
21-
"binary:logistic": "logistic regression for binary classification, output probability",
22-
"binary:logitraw": "logistic regression for binary classification, output score before logistic transformation",
23-
"multi:softmax": "set XGBoost to do multiclass classification using the softmax objective, you also need to set num_class(number of classes)",
24-
"multi:softprob": "same as softmax, but output a vector of ndata * nclass, which can be further reshaped to ndata * nclass matrix. The result contains predicted probability of each data point belonging to each class.",
25-
"rank:map": "Use LambdaMART to perform list-wise ranking where Mean Average Precision (MAP) is maximized",
26-
"rank:ndcg": "Use LambdaMART to perform list-wise ranking where Normalized Discounted Cumulative Gain (NDCG) is maximized",
27-
"rank:pairwise": "Use LambdaMART to perform pairwise ranking where the pairwise loss is minimized",
28-
"reg:gamma": "gamma regression with log-link. Output is a mean of gamma distribution. It might be useful, e.g., for modeling insurance claims severity, or for any outcome that might be gamma-distributed.",
29-
"reg:logistic": "logistic regression",
30-
"reg:squarederror": "regression with squared loss.",
31-
"reg:squaredlogerror": "regression with squared log loss 1/2[log(pred+1)\u2212log(label+1)]^2",
32-
"reg:tweedie": "Tweedie regression with log-link. It might be useful, e.g., for modeling total loss in insurance, or for any outcome that might be Tweedie-distributed.",
33-
"survival:cox": "Cox regression for right censored survival time data (negative values are considered right censored). Note that predictions are returned on the hazard ratio scale (i.e., as HR = exp(marginal_prediction) in the proportional hazard function h(t) = h0(t) * HR)."
18+
var XGBoostObjectiveDocs = map[string]string{
19+
"binary:hinge": "hinge loss for binary classification. This makes predictions of 0 or 1, rather than producing probabilities.",
20+
"binary:logistic": "logistic regression for binary classification, output probability",
21+
"binary:logitraw": "logistic regression for binary classification, output score before logistic transformation",
22+
"multi:softmax": "set XGBoost to do multiclass classification using the softmax objective, you also need to set num_class(number of classes)",
23+
"multi:softprob": "same as softmax, but output a vector of ndata * nclass, which can be further reshaped to ndata * nclass matrix. The result contains predicted probability of each data point belonging to each class.",
24+
"rank:map": "Use LambdaMART to perform list-wise ranking where Mean Average Precision (MAP) is maximized",
25+
"rank:ndcg": "Use LambdaMART to perform list-wise ranking where Normalized Discounted Cumulative Gain (NDCG) is maximized",
26+
"rank:pairwise": "Use LambdaMART to perform pairwise ranking where the pairwise loss is minimized",
27+
"reg:gamma": "gamma regression with log-link. Output is a mean of gamma distribution. It might be useful, e.g., for modeling insurance claims severity, or for any outcome that might be gamma-distributed.",
28+
"reg:logistic": "logistic regression",
29+
"reg:squarederror": "regression with squared loss.",
30+
"reg:squaredlogerror": "regression with squared log loss 1/2[log(pred+1)\u2212log(label+1)]^2",
31+
"reg:tweedie": "Tweedie regression with log-link. It might be useful, e.g., for modeling total loss in insurance, or for any outcome that might be Tweedie-distributed.",
32+
"survival:cox": "Cox regression for right censored survival time data (negative values are considered right censored). Note that predictions are returned on the hazard ratio scale (i.e., as HR = exp(marginal_prediction) in the proportional hazard function h(t) = h0(t) * HR).",
3433
}
35-
`

0 commit comments

Comments (0)