Skip to content

Commit 773ded6

Browse files
authored
[sycl] Add depth-wise policy (dmlc#10690)
Co-authored-by: Dmitry Razdoburdin <>
1 parent b457d0d commit 773ded6

File tree

3 files changed

+252
-1
lines changed

3 files changed

+252
-1
lines changed

plugin/sycl/tree/hist_updater.cc

Lines changed: 157 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -79,6 +79,162 @@ void HistUpdater<GradientSumT>::BuildLocalHistograms(
7979
builder_monitor_.Stop("BuildLocalHistograms");
8080
}
8181

82+
template<typename GradientSumT>
83+
void HistUpdater<GradientSumT>::BuildNodeStats(
84+
const common::GHistIndexMatrix &gmat,
85+
RegTree *p_tree,
86+
const USMVector<GradientPair, MemoryType::on_device> &gpair) {
87+
builder_monitor_.Start("BuildNodeStats");
88+
for (auto const& entry : qexpand_depth_wise_) {
89+
int nid = entry.nid;
90+
this->InitNewNode(nid, gmat, gpair, *p_tree);
91+
// add constraints
92+
if (!(*p_tree)[nid].IsLeftChild() && !(*p_tree)[nid].IsRoot()) {
93+
// it's a right child
94+
auto parent_id = (*p_tree)[nid].Parent();
95+
auto left_sibling_id = (*p_tree)[parent_id].LeftChild();
96+
auto parent_split_feature_id = snode_host_[parent_id].best.SplitIndex();
97+
tree_evaluator_.AddSplit(
98+
parent_id, left_sibling_id, nid, parent_split_feature_id,
99+
snode_host_[left_sibling_id].weight, snode_host_[nid].weight);
100+
interaction_constraints_.Split(parent_id, parent_split_feature_id,
101+
left_sibling_id, nid);
102+
}
103+
}
104+
builder_monitor_.Stop("BuildNodeStats");
105+
}
106+
107+
template<typename GradientSumT>
108+
void HistUpdater<GradientSumT>::AddSplitsToTree(
109+
const common::GHistIndexMatrix &gmat,
110+
RegTree *p_tree,
111+
int *num_leaves,
112+
int depth,
113+
std::vector<ExpandEntry>* nodes_for_apply_split,
114+
std::vector<ExpandEntry>* temp_qexpand_depth) {
115+
builder_monitor_.Start("AddSplitsToTree");
116+
auto evaluator = tree_evaluator_.GetEvaluator();
117+
for (auto const& entry : qexpand_depth_wise_) {
118+
const auto lr = param_.learning_rate;
119+
int nid = entry.nid;
120+
121+
if (snode_host_[nid].best.loss_chg < kRtEps ||
122+
(param_.max_depth > 0 && depth == param_.max_depth) ||
123+
(param_.max_leaves > 0 && (*num_leaves) == param_.max_leaves)) {
124+
(*p_tree)[nid].SetLeaf(snode_host_[nid].weight * lr);
125+
} else {
126+
nodes_for_apply_split->push_back(entry);
127+
128+
NodeEntry<GradientSumT>& e = snode_host_[nid];
129+
bst_float left_leaf_weight =
130+
evaluator.CalcWeight(nid, GradStats<GradientSumT>{e.best.left_sum}) * lr;
131+
bst_float right_leaf_weight =
132+
evaluator.CalcWeight(nid, GradStats<GradientSumT>{e.best.right_sum}) * lr;
133+
p_tree->ExpandNode(nid, e.best.SplitIndex(), e.best.split_value,
134+
e.best.DefaultLeft(), e.weight, left_leaf_weight,
135+
right_leaf_weight, e.best.loss_chg, e.stats.GetHess(),
136+
e.best.left_sum.GetHess(), e.best.right_sum.GetHess());
137+
138+
int left_id = (*p_tree)[nid].LeftChild();
139+
int right_id = (*p_tree)[nid].RightChild();
140+
temp_qexpand_depth->push_back(ExpandEntry(left_id, p_tree->GetDepth(left_id)));
141+
temp_qexpand_depth->push_back(ExpandEntry(right_id, p_tree->GetDepth(right_id)));
142+
// - 1 parent + 2 new children
143+
(*num_leaves)++;
144+
}
145+
}
146+
builder_monitor_.Stop("AddSplitsToTree");
147+
}
148+
149+
150+
template<typename GradientSumT>
151+
void HistUpdater<GradientSumT>::EvaluateAndApplySplits(
152+
const common::GHistIndexMatrix &gmat,
153+
RegTree *p_tree,
154+
int *num_leaves,
155+
int depth,
156+
std::vector<ExpandEntry> *temp_qexpand_depth) {
157+
EvaluateSplits(qexpand_depth_wise_, gmat, *p_tree);
158+
159+
std::vector<ExpandEntry> nodes_for_apply_split;
160+
AddSplitsToTree(gmat, p_tree, num_leaves, depth,
161+
&nodes_for_apply_split, temp_qexpand_depth);
162+
ApplySplit(nodes_for_apply_split, gmat, p_tree);
163+
}
164+
165+
// Split nodes to 2 sets depending on amount of rows in each node
166+
// Histograms for small nodes will be built explicitly
167+
// Histograms for big nodes will be built by 'Subtraction Trick'
168+
// Exception: in distributed setting, we always build the histogram for the left child node
169+
// and use 'Subtraction Trick' to built the histogram for the right child node.
170+
// This ensures that the workers operate on the same set of tree nodes.
171+
template <typename GradientSumT>
172+
void HistUpdater<GradientSumT>::SplitSiblings(
173+
const std::vector<ExpandEntry> &nodes,
174+
std::vector<ExpandEntry> *small_siblings,
175+
std::vector<ExpandEntry> *big_siblings,
176+
RegTree *p_tree) {
177+
builder_monitor_.Start("SplitSiblings");
178+
for (auto const& entry : nodes) {
179+
int nid = entry.nid;
180+
RegTree::Node &node = (*p_tree)[nid];
181+
if (node.IsRoot()) {
182+
small_siblings->push_back(entry);
183+
} else {
184+
const int32_t left_id = (*p_tree)[node.Parent()].LeftChild();
185+
const int32_t right_id = (*p_tree)[node.Parent()].RightChild();
186+
187+
if (nid == left_id && row_set_collection_[left_id ].Size() <
188+
row_set_collection_[right_id].Size()) {
189+
small_siblings->push_back(entry);
190+
} else if (nid == right_id && row_set_collection_[right_id].Size() <=
191+
row_set_collection_[left_id ].Size()) {
192+
small_siblings->push_back(entry);
193+
} else {
194+
big_siblings->push_back(entry);
195+
}
196+
}
197+
}
198+
builder_monitor_.Stop("SplitSiblings");
199+
}
200+
201+
template<typename GradientSumT>
202+
void HistUpdater<GradientSumT>::ExpandWithDepthWise(
203+
const common::GHistIndexMatrix &gmat,
204+
RegTree *p_tree,
205+
const USMVector<GradientPair, MemoryType::on_device> &gpair) {
206+
int num_leaves = 0;
207+
208+
// in depth_wise growing, we feed loss_chg with 0.0 since it is not used anyway
209+
qexpand_depth_wise_.emplace_back(ExpandEntry::kRootNid,
210+
p_tree->GetDepth(ExpandEntry::kRootNid));
211+
++num_leaves;
212+
for (int depth = 0; depth < param_.max_depth + 1; depth++) {
213+
std::vector<int> sync_ids;
214+
std::vector<ExpandEntry> temp_qexpand_depth;
215+
SplitSiblings(qexpand_depth_wise_, &nodes_for_explicit_hist_build_,
216+
&nodes_for_subtraction_trick_, p_tree);
217+
hist_rows_adder_->AddHistRows(this, &sync_ids, p_tree);
218+
BuildLocalHistograms(gmat, p_tree, gpair);
219+
hist_synchronizer_->SyncHistograms(this, sync_ids, p_tree);
220+
BuildNodeStats(gmat, p_tree, gpair);
221+
222+
EvaluateAndApplySplits(gmat, p_tree, &num_leaves, depth,
223+
&temp_qexpand_depth);
224+
225+
// clean up
226+
qexpand_depth_wise_.clear();
227+
nodes_for_subtraction_trick_.clear();
228+
nodes_for_explicit_hist_build_.clear();
229+
if (temp_qexpand_depth.empty()) {
230+
break;
231+
} else {
232+
qexpand_depth_wise_ = temp_qexpand_depth;
233+
temp_qexpand_depth.clear();
234+
}
235+
}
236+
}
237+
82238
template<typename GradientSumT>
83239
void HistUpdater<GradientSumT>::ExpandWithLossGuide(
84240
const common::GHistIndexMatrix& gmat,
@@ -326,7 +482,7 @@ void HistUpdater<GradientSumT>::InitData(
326482
if (param_.grow_policy == xgboost::tree::TrainParam::kLossGuide) {
327483
qexpand_loss_guided_.reset(new ExpandQueue(LossGuide));
328484
} else {
329-
LOG(WARNING) << "Depth-wise building is not yet implemented";
485+
qexpand_depth_wise_.clear();
330486
}
331487
}
332488
builder_monitor_.Stop("InitData");

plugin/sycl/tree/hist_updater.h

Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -129,6 +129,36 @@ class HistUpdater {
129129
const USMVector<GradientPair, MemoryType::on_device> &gpair,
130130
const RegTree& tree);
131131

132+
// Split nodes to 2 sets depending on amount of rows in each node
133+
// Histograms for small nodes will be built explicitly
134+
// Histograms for big nodes will be built by 'Subtraction Trick'
135+
void SplitSiblings(const std::vector<ExpandEntry>& nodes,
136+
std::vector<ExpandEntry>* small_siblings,
137+
std::vector<ExpandEntry>* big_siblings,
138+
RegTree *p_tree);
139+
140+
void BuildNodeStats(const common::GHistIndexMatrix &gmat,
141+
RegTree *p_tree,
142+
const USMVector<GradientPair, MemoryType::on_device> &gpair);
143+
144+
void EvaluateAndApplySplits(const common::GHistIndexMatrix &gmat,
145+
RegTree *p_tree,
146+
int *num_leaves,
147+
int depth,
148+
std::vector<ExpandEntry> *temp_qexpand_depth);
149+
150+
void AddSplitsToTree(
151+
const common::GHistIndexMatrix &gmat,
152+
RegTree *p_tree,
153+
int *num_leaves,
154+
int depth,
155+
std::vector<ExpandEntry>* nodes_for_apply_split,
156+
std::vector<ExpandEntry>* temp_qexpand_depth);
157+
158+
void ExpandWithDepthWise(const common::GHistIndexMatrix &gmat,
159+
RegTree *p_tree,
160+
const USMVector<GradientPair, MemoryType::on_device> &gpair);
161+
132162
void BuildLocalHistograms(const common::GHistIndexMatrix &gmat,
133163
RegTree *p_tree,
134164
const USMVector<GradientPair, MemoryType::on_device> &gpair);
@@ -180,6 +210,7 @@ class HistUpdater {
180210
std::function<bool(ExpandEntry, ExpandEntry)>>;
181211

182212
std::unique_ptr<ExpandQueue> qexpand_loss_guided_;
213+
std::vector<ExpandEntry> qexpand_depth_wise_;
183214

184215
enum DataLayout { kDenseDataZeroBased, kDenseDataOneBased, kSparseData };
185216
DataLayout data_layout_;

tests/cpp/plugin/test_sycl_hist_updater.cc

Lines changed: 64 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -75,6 +75,13 @@ class TestHistUpdater : public HistUpdater<GradientSumT> {
7575
const USMVector<GradientPair, MemoryType::on_device> &gpair) {
7676
HistUpdater<GradientSumT>::ExpandWithLossGuide(gmat, p_tree, gpair);
7777
}
78+
79+
// Test-only shim exposing the protected ExpandWithDepthWise to the fixture.
auto TestExpandWithDepthWise(const common::GHistIndexMatrix& gmat,
                             DMatrix *p_fmat,
                             RegTree* p_tree,
                             const USMVector<GradientPair, MemoryType::on_device> &gpair) {
  // p_fmat is not needed by ExpandWithDepthWise; it is kept so the caller's
  // signature stays unchanged. Silence the unused-parameter warning.
  (void)p_fmat;
  HistUpdater<GradientSumT>::ExpandWithDepthWise(gmat, p_tree, gpair);
}
7885
};
7986

8087
void GenerateRandomGPairs(::sycl::queue* qu, GradientPair* gpair_ptr, size_t num_rows, bool has_neg_hess) {
@@ -544,6 +551,55 @@ void TestHistUpdaterExpandWithLossGuide(const xgboost::tree::TrainParam& param)
544551
}
545552

546553

554+
template <typename GradientSumT>
555+
void TestHistUpdaterExpandWithDepthWise(const xgboost::tree::TrainParam& param) {
556+
const size_t num_rows = 3;
557+
const size_t num_columns = 1;
558+
const size_t n_bins = 16;
559+
560+
Context ctx;
561+
ctx.UpdateAllowUnknown(Args{{"device", "sycl"}});
562+
563+
DeviceManager device_manager;
564+
auto qu = device_manager.GetQueue(ctx.Device());
565+
566+
std::vector<float> data = {7, 3, 15};
567+
auto p_fmat = GetDMatrixFromData(data, num_rows, num_columns);
568+
569+
DeviceMatrix dmat;
570+
dmat.Init(qu, p_fmat.get());
571+
common::GHistIndexMatrix gmat;
572+
gmat.Init(qu, &ctx, dmat, n_bins);
573+
574+
std::vector<GradientPair> gpair_host = {{1, 2}, {3, 1}, {1, 1}};
575+
USMVector<GradientPair, MemoryType::on_device> gpair(&qu, gpair_host);
576+
577+
RegTree tree;
578+
FeatureInteractionConstraintHost int_constraints;
579+
ObjInfo task{ObjInfo::kRegression};
580+
std::unique_ptr<TreeUpdater> pruner{TreeUpdater::Create("prune", &ctx, &task)};
581+
TestHistUpdater<GradientSumT> updater(&ctx, qu, param, std::move(pruner), int_constraints, p_fmat.get());
582+
updater.SetHistSynchronizer(new BatchHistSynchronizer<GradientSumT>());
583+
updater.SetHistRowsAdder(new BatchHistRowsAdder<GradientSumT>());
584+
auto* row_set_collection = updater.TestInitData(gmat, gpair, *p_fmat, tree);
585+
586+
updater.TestExpandWithDepthWise(gmat, p_fmat.get(), &tree, gpair);
587+
588+
const auto& nodes = tree.GetNodes();
589+
std::vector<float> ans(data.size());
590+
for (size_t data_idx = 0; data_idx < data.size(); ++data_idx) {
591+
size_t node_idx = 0;
592+
while (!nodes[node_idx].IsLeaf()) {
593+
node_idx = data[data_idx] < nodes[node_idx].SplitCond() ? nodes[node_idx].LeftChild() : nodes[node_idx].RightChild();
594+
}
595+
ans[data_idx] = nodes[node_idx].LeafValue();
596+
}
597+
598+
ASSERT_NEAR(ans[0], -0.15, 1e-6);
599+
ASSERT_NEAR(ans[1], -0.45, 1e-6);
600+
ASSERT_NEAR(ans[2], -0.15, 1e-6);
601+
}
602+
547603
TEST(SyclHistUpdater, Sampling) {
548604
xgboost::tree::TrainParam param;
549605
param.UpdateAllowUnknown(Args{{"subsample", "0.7"}});
@@ -620,4 +676,12 @@ TEST(SyclHistUpdater, ExpandWithLossGuide) {
620676
TestHistUpdaterExpandWithLossGuide<double>(param);
621677
}
622678

679+
// Runs the depth-wise expansion check (max_depth = 2) for both
// single- and double-precision gradient sums.
TEST(SyclHistUpdater, ExpandWithDepthWise) {
  xgboost::tree::TrainParam param;
  param.UpdateAllowUnknown(Args{{"max_depth", "2"}});

  TestHistUpdaterExpandWithDepthWise<float>(param);
  TestHistUpdaterExpandWithDepthWise<double>(param);
}
686+
623687
} // namespace xgboost::sycl::tree

0 commit comments

Comments
 (0)