Commit f816e11

Author: Yang Yang (committed)
clean up
1 parent e443d03 · commit f816e11

File tree: 1 file changed (+0, -6 lines)


paddle/operators/parallel_do_op.cc

Lines changed: 0 additions & 6 deletions
@@ -174,7 +174,6 @@ class ParallelDoOp : public framework::OperatorBase {
       lod_tensor_to_be_merged->MergeLoDTensor(lod_tensors, dev_ctx.GetPlace());
     }
     WaitOnPlaces(places);
-    LOG(INFO) << "End of ParallelGradDo";
   }
 };
 
@@ -237,7 +236,6 @@ class ParallelDoGradOp : public framework::OperatorBase {
     WaitOnPlaces(places);
 
     AccumulateGrad(scope, place, sub_scopes, places);
-    LOG(INFO) << "End of ParallelDoGrad";
   }
 
   void AccumulateGrad(const framework::Scope &scope,
@@ -248,23 +246,19 @@ class ParallelDoGradOp : public framework::OperatorBase {
       std::__cxx11::string tmp_name;
       auto *tmp = sub_scopes[0]->Var(&tmp_name);
 
-      LOG(INFO) << "---" << s;
       for (size_t i = 1; i < sub_scopes.size(); ++i) {
         if (!(places[i] == places[0])) {
-          LOG(INFO) << "---";
           CopyOrShare(*sub_scopes[i]->FindVar(s), places[0], tmp);
           WaitOnPlace(places[0]);
         }
 
-        LOG(INFO) << "---";
         auto sum_op = framework::OpRegistry::CreateOp(
             "sum", {{"X", {s, tmp_name}}}, {{"Out", {s}}},
             framework::AttributeMap{});
         sum_op->Run(*sub_scopes[0], places[0]);
         WaitOnPlace(places[0]);
       }
 
-      LOG(INFO) << "---";
       CopyOrShare(*sub_scopes[0]->FindVar(s), place, scope.FindVar(s));
     }
     WaitOnPlaces(places);
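
For context, the AccumulateGrad routine touched by the last hunk sums each device's gradient copy into the first sub-scope via the "sum" op and then copies the result back to the parent scope; the removed LOG(INFO) lines only traced progress and do not change that logic. Below is a minimal standalone C++ sketch of the same accumulation pattern, assuming plain vectors as stand-ins for per-device gradient tensors; GradBuffer and AccumulateIntoFirstDevice are hypothetical names, not part of the Paddle API.

#include <cstddef>
#include <vector>

// Hypothetical stand-in for one device's gradient buffer; the real operator
// works on framework::LoDTensor values held in per-device sub-scopes.
using GradBuffer = std::vector<float>;

// Sum every device's gradient copy into device 0's buffer, mirroring the
// loop in ParallelDoGradOp::AccumulateGrad that runs the "sum" op on
// sub_scopes[0] before copying the result back to the parent scope.
void AccumulateIntoFirstDevice(std::vector<GradBuffer> &per_device_grads) {
  if (per_device_grads.empty()) return;
  GradBuffer &dst = per_device_grads[0];
  for (std::size_t i = 1; i < per_device_grads.size(); ++i) {
    const GradBuffer &src = per_device_grads[i];
    for (std::size_t j = 0; j < dst.size() && j < src.size(); ++j) {
      dst[j] += src[j];  // element-wise accumulation, like the "sum" op
    }
  }
}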

0 commit comments
