Skip to content

Commit 2e8459b

Browse files
committed
DebugCode
1 parent d752768 commit 2e8459b

File tree

1 file changed

+10
-5
lines changed

1 file changed

+10
-5
lines changed

paddle/fluid/framework/details/nccl_all_reduce_op_handle.cc

Lines changed: 10 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -73,8 +73,9 @@ void NCCLAllReduceOpHandle::RunImpl() {
7373

7474
for (size_t i = 0; i < local_scopes_.size(); ++i) {
7575
auto *s = local_scopes_[i];
76+
auto &local_scope = *s->FindVar(kLocalExecScopeName)->Get<Scope *>();
7677

77-
auto &lod_tensor = s->FindVar(var_name)->Get<LoDTensor>();
78+
auto &lod_tensor = local_scope.FindVar(var_name)->Get<LoDTensor>();
7879
lod_tensors.emplace_back(lod_tensor);
7980
}
8081

@@ -110,17 +111,21 @@ void NCCLAllReduceOpHandle::RunImpl() {
110111
}
111112
});
112113
} else { // Special handle CPU only Operator's gradient. Like CRF
113-
auto &trg =
114-
*this->local_scopes_[0]->Var()->GetMutable<framework::LoDTensor>();
114+
auto &trg = *this->local_scopes_[0]
115+
->FindVar(kLocalExecScopeName)
116+
->Get<Scope *>()
117+
->Var()
118+
->GetMutable<framework::LoDTensor>();
115119

116120
// Reduce All Tensor to trg in CPU
117121
ReduceLoDTensor func(lod_tensors, &trg);
118122
VisitDataType(ToDataType(lod_tensors[0].type()), func);
119123

120124
for (size_t i = 0; i < local_scopes_.size(); ++i) {
121-
auto &scope = local_scopes_[i];
125+
auto &scope =
126+
*local_scopes_[i]->FindVar(kLocalExecScopeName)->Get<Scope *>();
122127
auto &p = places_[i];
123-
auto *var = scope->FindVar(var_name);
128+
auto *var = scope.FindVar(var_name);
124129
auto *dev_ctx = dev_ctxes_[p];
125130

126131
RunAndRecordEvent(p, [&trg, var, dev_ctx, p] {

0 commit comments

Comments (0)