
Commit 4bb612e

Yancey0623 authored and committed
Merge pull request #11702 from Yancey1989/fix_async_update_failed
Fix async update failed
1 parent fac1d47 · commit 4bb612e

File tree

3 files changed: +13 -14 lines changed

paddle/fluid/operators/listen_and_serv_op.cc

Lines changed: 7 additions & 2 deletions
@@ -164,7 +164,8 @@ void ListenAndServOp::RunSyncLoop(
 }
 
 void ListenAndServOp::RunAsyncLoop(framework::Executor *executor,
-                                   framework::ProgramDesc *program) const {
+                                   framework::ProgramDesc *program,
+                                   framework::Scope *recv_scope) const {
   // grad name to block id
   std::unordered_map<std::string, int32_t> grad_to_block_id;
   std::unordered_map<int32_t, std::string> id_to_grad;
@@ -191,6 +192,10 @@ void ListenAndServOp::RunAsyncLoop(framework::Executor *executor,
     block_list.push_back(blkid);
   }
   auto optimize_prepared = executor->Prepare(*program, block_list);
+  // execute global block if needed
+  if (block_list[0] == 1 && id_to_grad.count(1) == 0) {
+    executor->RunPreparedContext(optimize_prepared[0].get(), recv_scope);
+  }
   std::unordered_map<std::string,
                      std::shared_ptr<framework::ExecutorPrepareContext>>
       grad_to_prepared_ctx;
@@ -315,7 +320,7 @@ void ListenAndServOp::RunImpl(const framework::Scope &scope,
   if (sync_mode) {
     RunSyncLoop(&executor, program, &recv_scope, prefetch_block_id_list);
   } else {
-    RunAsyncLoop(&executor, program);
+    RunAsyncLoop(&executor, program, &recv_scope);
   }
 }
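To spell out the intent of the new guard: after every optimize block is prepared, the first prepared context is run once in the receive scope, but only when block 1 is in the prepared list and is not mapped to any gradient (the in-diff comment calls this executing the global block). A rough Python sketch of just that condition follows; maybe_run_global_block, prepared, and run_block are illustrative stand-ins, not the operator's C++ API.

# Illustrative sketch of the guard added to RunAsyncLoop; names are stand-ins.
def maybe_run_global_block(block_list, id_to_grad, prepared, recv_scope, run_block):
    # block_list holds the block ids that were prepared, in order;
    # id_to_grad maps a block id to the gradient that block optimizes.
    if block_list and block_list[0] == 1 and 1 not in id_to_grad:
        # Block 1 was prepared but is not tied to any gradient, so run it
        # once up front in the receive scope before serving async updates.
        run_block(prepared[0], recv_scope)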

paddle/fluid/operators/listen_and_serv_op.h

Lines changed: 2 additions & 1 deletion
@@ -50,7 +50,8 @@ class ListenAndServOp : public framework::OperatorBase {
                    const std::vector<int>& prefetch_block_id_list) const;
 
   void RunAsyncLoop(framework::Executor* executor,
-                    framework::ProgramDesc* program) const;
+                    framework::ProgramDesc* program,
+                    framework::Scope* recv_scope) const;
 
   void SavePort() const;

python/paddle/fluid/transpiler/distribute_transpiler.py

Lines changed: 4 additions & 11 deletions
@@ -1293,16 +1293,6 @@ def _create_ufind(self, optimize_ops):
             ufind.union(op1, op2)
         return ufind
 
-    def _is_opt_role_op(self, op):
-        # NOTE: depend on oprole to find out whether this op is for
-        # optimize
-        op_maker = core.op_proto_and_checker_maker
-        optimize_role = core.op_proto_and_checker_maker.OpRole.Optimize
-        if op_maker.kOpRoleAttrName() in op.attrs and \
-            int(op.attrs[op_maker.kOpRoleAttrName()]) == int(optimize_role):
-            return True
-        return False
-
     def _is_optimizer_op(self, op):
         if "Param" in op.input_names and \
             "LearningRate" in op.input_names:
@@ -1393,7 +1383,10 @@ def _get_optimize_pass(self):
         params_grads = []
         origin_var_dict = self.origin_program.global_block().vars
         for op in block.ops:
-            if self._is_opt_role_op(op):
+            # NOTE(Yancey1989): we can not use op role to distinguish an optimizer op
+            # or not, because all ops in optimizer sub-graph would
+            # sign the optimizer op role
+            if self._is_optimizer_op(op):
                 opt_ops.append(op)
                 # HACK(wuyi): if we find grad vars from input of optimize
                 # ops, we may get the output of clip op. Use syntax "@GRAD"
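The transpiler side of the fix swaps role-based detection for input-based detection: as the added NOTE says, every op in the optimizer sub-graph is tagged with the Optimize op role, so checking the role over-matches, while checking for Param and LearningRate inputs only matches true optimizer ops such as sgd or adam. A minimal, self-contained sketch of the two checks (simple stand-in objects and an assumed attribute layout, not the Paddle core API):

# Stand-ins to contrast the two checks; not the PaddlePaddle API.
OPT_ROLE = 2  # assumed numeric value for OpRole.Optimize, illustration only

class FakeOp:
    def __init__(self, op_type, input_names, attrs):
        self.type = op_type
        self.input_names = input_names
        self.attrs = attrs

def is_opt_role_op(op):
    # Removed approach: every op created while building the optimizer
    # sub-graph (clip, scale, ...) carries the Optimize role, so this
    # matches more than the real optimizer ops.
    return int(op.attrs.get("op_role", -1)) == OPT_ROLE

def is_optimizer_op(op):
    # Kept approach: only ops that consume a parameter and a learning
    # rate (sgd, adam, ...) count as optimizer ops.
    return "Param" in op.input_names and "LearningRate" in op.input_names

sgd = FakeOp("sgd", ["Param", "Grad", "LearningRate"], {"op_role": OPT_ROLE})
clip = FakeOp("clip", ["X"], {"op_role": OPT_ROLE})

assert is_opt_role_op(sgd) and is_opt_role_op(clip)        # role check over-matches
assert is_optimizer_op(sgd) and not is_optimizer_op(clip)  # input check is precise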
