Skip to content

Commit ace7713

Browse files
authored
[backport] Fix default metric configuration. (dmlc#9575) (dmlc#9590)
1 parent 096047c commit ace7713

File tree

6 files changed

+105
-25
lines changed

6 files changed

+105
-25
lines changed

src/learner.cc

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1317,7 +1317,9 @@ class LearnerImpl : public LearnerIO {
13171317
if (metrics_.empty() && tparam_.disable_default_eval_metric <= 0) {
13181318
metrics_.emplace_back(Metric::Create(obj_->DefaultEvalMetric(), &ctx_));
13191319
auto config = obj_->DefaultMetricConfig();
1320-
metrics_.back()->LoadConfig(config);
1320+
if (!IsA<Null>(config)) {
1321+
metrics_.back()->LoadConfig(config);
1322+
}
13211323
metrics_.back()->Configure({cfg_.begin(), cfg_.end()});
13221324
}
13231325

src/objective/regression_obj.cu

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -268,6 +268,13 @@ class PseudoHuberRegression : public FitIntercept {
268268
}
269269
FromJson(in["pseudo_huber_param"], &param_);
270270
}
271+
[[nodiscard]] Json DefaultMetricConfig() const override {
272+
CHECK(param_.GetInitialised());
273+
Json config{Object{}};
274+
config["name"] = String{this->DefaultEvalMetric()};
275+
config["pseudo_huber_param"] = ToJson(param_);
276+
return config;
277+
}
271278
};
272279

273280
XGBOOST_REGISTER_OBJECTIVE(PseudoHuberRegression, "reg:pseudohubererror")

tests/cpp/objective/test_objective.cc

Lines changed: 58 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66
#include <xgboost/objective.h>
77

88
#include "../helpers.h"
9+
#include "../objective_helpers.h"
910

1011
TEST(Objective, UnknownFunction) {
1112
xgboost::ObjFunction* obj = nullptr;
@@ -43,4 +44,61 @@ TEST(Objective, PredTransform) {
4344
ASSERT_TRUE(predts.HostCanWrite());
4445
}
4546
}
47+
48+
class TestDefaultObjConfig : public ::testing::TestWithParam<std::string> {
49+
Context ctx_;
50+
51+
public:
52+
void Run(std::string objective) {
53+
auto Xy = MakeFmatForObjTest(objective);
54+
std::unique_ptr<Learner> learner{Learner::Create({Xy})};
55+
std::unique_ptr<ObjFunction> objfn{ObjFunction::Create(objective, &ctx_)};
56+
57+
learner->SetParam("objective", objective);
58+
if (objective.find("multi") != std::string::npos) {
59+
learner->SetParam("num_class", "3");
60+
objfn->Configure(Args{{"num_class", "3"}});
61+
} else if (objective.find("quantile") != std::string::npos) {
62+
learner->SetParam("quantile_alpha", "0.5");
63+
objfn->Configure(Args{{"quantile_alpha", "0.5"}});
64+
} else {
65+
objfn->Configure(Args{});
66+
}
67+
learner->Configure();
68+
learner->UpdateOneIter(0, Xy);
69+
learner->EvalOneIter(0, {Xy}, {"train"});
70+
Json config{Object{}};
71+
learner->SaveConfig(&config);
72+
auto jobj = get<Object const>(config["learner"]["objective"]);
73+
74+
ASSERT_TRUE(jobj.find("name") != jobj.cend());
75+
// FIXME(jiamingy): We should have the following check, but some legacy parameter like
76+
// "pos_weight", "delta_step" in objectives are not in metrics.
77+
78+
// if (jobj.size() > 1) {
79+
// ASSERT_FALSE(IsA<Null>(objfn->DefaultMetricConfig()));
80+
// }
81+
auto mconfig = objfn->DefaultMetricConfig();
82+
if (!IsA<Null>(mconfig)) {
83+
// make sure metric can handle it
84+
std::unique_ptr<Metric> metricfn{Metric::Create(get<String const>(mconfig["name"]), &ctx_)};
85+
metricfn->LoadConfig(mconfig);
86+
Json loaded(Object{});
87+
metricfn->SaveConfig(&loaded);
88+
metricfn->Configure(Args{});
89+
ASSERT_EQ(mconfig, loaded);
90+
}
91+
}
92+
};
93+
94+
TEST_P(TestDefaultObjConfig, Objective) {
95+
std::string objective = GetParam();
96+
this->Run(objective);
97+
}
98+
99+
INSTANTIATE_TEST_SUITE_P(Objective, TestDefaultObjConfig,
100+
::testing::ValuesIn(MakeObjNamesForTest()),
101+
[](const ::testing::TestParamInfo<TestDefaultObjConfig::ParamType>& info) {
102+
return ObjTestNameGenerator(info);
103+
});
46104
} // namespace xgboost

tests/cpp/objective_helpers.cc

Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,31 @@
1+
/**
2+
* Copyright (c) 2023, XGBoost contributors
3+
*/
4+
#include "objective_helpers.h"
5+
6+
#include "../../src/common/linalg_op.h" // for begin, end
7+
#include "helpers.h" // for RandomDataGenerator
8+
9+
namespace xgboost {
10+
std::shared_ptr<DMatrix> MakeFmatForObjTest(std::string const& obj) {
11+
auto constexpr kRows = 10, kCols = 10;
12+
auto p_fmat = RandomDataGenerator{kRows, kCols, 0}.GenerateDMatrix(true);
13+
auto& h_upper = p_fmat->Info().labels_upper_bound_.HostVector();
14+
auto& h_lower = p_fmat->Info().labels_lower_bound_.HostVector();
15+
h_lower.resize(kRows);
16+
h_upper.resize(kRows);
17+
for (size_t i = 0; i < kRows; ++i) {
18+
h_lower[i] = 1;
19+
h_upper[i] = 10;
20+
}
21+
if (obj.find("rank:") != std::string::npos) {
22+
auto h_label = p_fmat->Info().labels.HostView();
23+
std::size_t k = 0;
24+
for (auto& v : h_label) {
25+
v = k % 2 == 0;
26+
++k;
27+
}
28+
}
29+
return p_fmat;
30+
};
31+
} // namespace xgboost

tests/cpp/objective_helpers.h

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,8 @@
11
/**
22
* Copyright (c) 2023, XGBoost contributors
33
*/
4+
#pragma once
5+
46
#include <dmlc/registry.h> // for Registry
57
#include <gtest/gtest.h>
68
#include <xgboost/objective.h> // for ObjFunctionReg
@@ -29,4 +31,6 @@ inline std::string ObjTestNameGenerator(const ::testing::TestParamInfo<ParamType
2931
}
3032
return name;
3133
};
34+
35+
std::shared_ptr<DMatrix> MakeFmatForObjTest(std::string const& obj);
3236
} // namespace xgboost

tests/cpp/test_learner.cc

Lines changed: 2 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -655,33 +655,11 @@ TEST_F(InitBaseScore, InitWithPredict) { this->TestInitWithPredt(); }
655655
TEST_F(InitBaseScore, UpdateProcess) { this->TestUpdateProcess(); }
656656

657657
class TestColumnSplit : public ::testing::TestWithParam<std::string> {
658-
static auto MakeFmat(std::string const& obj) {
659-
auto constexpr kRows = 10, kCols = 10;
660-
auto p_fmat = RandomDataGenerator{kRows, kCols, 0}.GenerateDMatrix(true);
661-
auto& h_upper = p_fmat->Info().labels_upper_bound_.HostVector();
662-
auto& h_lower = p_fmat->Info().labels_lower_bound_.HostVector();
663-
h_lower.resize(kRows);
664-
h_upper.resize(kRows);
665-
for (size_t i = 0; i < kRows; ++i) {
666-
h_lower[i] = 1;
667-
h_upper[i] = 10;
668-
}
669-
if (obj.find("rank:") != std::string::npos) {
670-
auto h_label = p_fmat->Info().labels.HostView();
671-
std::size_t k = 0;
672-
for (auto& v : h_label) {
673-
v = k % 2 == 0;
674-
++k;
675-
}
676-
}
677-
return p_fmat;
678-
};
679-
680658
void TestBaseScore(std::string objective, float expected_base_score, Json expected_model) {
681659
auto const world_size = collective::GetWorldSize();
682660
auto const rank = collective::GetRank();
683661

684-
auto p_fmat = MakeFmat(objective);
662+
auto p_fmat = MakeFmatForObjTest(objective);
685663
std::shared_ptr<DMatrix> sliced{p_fmat->SliceCol(world_size, rank)};
686664
std::unique_ptr<Learner> learner{Learner::Create({sliced})};
687665
learner->SetParam("tree_method", "approx");
@@ -705,7 +683,7 @@ class TestColumnSplit : public ::testing::TestWithParam<std::string> {
705683

706684
public:
707685
void Run(std::string objective) {
708-
auto p_fmat = MakeFmat(objective);
686+
auto p_fmat = MakeFmatForObjTest(objective);
709687
std::unique_ptr<Learner> learner{Learner::Create({p_fmat})};
710688
learner->SetParam("tree_method", "approx");
711689
learner->SetParam("objective", objective);

0 commit comments

Comments (0)