Skip to content

Commit 63bd38b

Browse files
committed
code optimize
1 parent 63bf82d commit 63bd38b

File tree

4 files changed

+19
-13
lines changed

4 files changed

+19
-13
lines changed

paddle/fluid/operators/detail/variable_response.h

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,9 @@ class VariableResponse {
6363
// other: number of error field.
6464
int Parse(const ::grpc::ByteBuffer& byte_buffer);
6565

66-
framework::Scope& GetLocalScope() const { return *local_scope_; }
66+
const framework::Scope& GetLocalScope() const { return *local_scope_; }
67+
68+
framework::Scope* GetMutableLocalScope() const { return local_scope_; }
6769

6870
inline std::string Varname() { return meta_.varname(); }
6971
inline std::string OutVarname() { return meta_.out_varname(); }

paddle/fluid/operators/listen_and_serv_op.cc

Lines changed: 11 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -207,18 +207,19 @@ void ListenAndServOp::RunAsyncLoop(framework::Executor *executor,
207207
framework::BlockDesc *prefetch_block) const {
208208
VLOG(3) << "RunAsyncLoop in";
209209
// grad name to block id
210-
std::unordered_map<std::string, int32_t> grad_to_id;
210+
std::unordered_map<std::string, int32_t> grad_to_block_id;
211211
std::unordered_map<int32_t, std::string> id_to_grad;
212212

213-
auto grad_to_id_str = Attr<std::vector<std::string>>("grad_to_id");
214-
for (auto &grad_and_id : grad_to_id_str) {
213+
auto grad_to_block_id_str =
214+
Attr<std::vector<std::string>>("grad_to_block_id");
215+
for (auto &grad_and_id : grad_to_block_id_str) {
215216
std::vector<std::string> pieces;
216217
split(grad_and_id, ':', &pieces);
217218
VLOG(3) << "after split, grad = " << pieces[0] << ", id=" << pieces[1];
218219
PADDLE_ENFORCE_EQ(pieces.size(), 2);
219-
PADDLE_ENFORCE_EQ(grad_to_id.count(pieces[0]), 0);
220+
PADDLE_ENFORCE_EQ(grad_to_block_id.count(pieces[0]), 0);
220221
int block_id = std::stoi(pieces[1]);
221-
grad_to_id[pieces[0]] = block_id;
222+
grad_to_block_id[pieces[0]] = block_id;
222223
id_to_grad[block_id] = pieces[0];
223224
}
224225
size_t num_blocks = program->Size();
@@ -232,9 +233,9 @@ void ListenAndServOp::RunAsyncLoop(framework::Executor *executor,
232233
auto optimize_prepared = executor->Prepare(*program, block_list);
233234
std::unordered_map<std::string,
234235
std::shared_ptr<framework::ExecutorPrepareContext>>
235-
grad_to_prepared;
236+
grad_to_prepared_block;
236237
for (size_t i = 0; i < block_list.size(); ++i) {
237-
grad_to_prepared[id_to_grad[block_list[i]]] = optimize_prepared[i];
238+
grad_to_prepared_block[id_to_grad[block_list[i]]] = optimize_prepared[i];
238239
}
239240

240241
VLOG(3) << "RunAsyncLoop into while";
@@ -253,8 +254,8 @@ void ListenAndServOp::RunAsyncLoop(framework::Executor *executor,
253254
LOG(ERROR) << "Can not find server side var: " << recv_var_name;
254255
PADDLE_THROW("Can not find server side var");
255256
}
256-
AsyncExecuteBlock(executor, grad_to_prepared[recv_var_name].get(),
257-
&(v.second->GetLocalScope()));
257+
AsyncExecuteBlock(executor, grad_to_prepared_block[recv_var_name].get(),
258+
v.second->GetMutableLocalScope());
258259
// TODO(qiao): explain why
259260
if (var->IsType<framework::SelectedRows>()) {
260261
var->GetMutable<framework::SelectedRows>()->mutable_rows()->clear();
@@ -328,7 +329,7 @@ from send_op and send back variables to recv_op.
328329
.SetDefault("127.0.0.1:6164")
329330
.AddCustomChecker([](const std::string &ip) { return !ip.empty(); });
330331
AddAttr<std::vector<std::string>>(
331-
"grad_to_id",
332+
"grad_to_block_id",
332333
333334
"a map from grad name to its optimize block id")
334335
.SetDefault({});

paddle/fluid/operators/send_recv_op_test.cc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -137,7 +137,7 @@ void StartServerNet(bool is_sparse) {
137137
attrs.insert({"GradList", std::vector<std::string>({"x1"})});
138138
attrs.insert({"OptimizeBlock", optimize_block});
139139
attrs.insert({"PrefetchBlock", prefetch_block});
140-
attrs.insert({"grad_to_id", std::vector<std::string>({""})});
140+
attrs.insert({"grad_to_block_id", std::vector<std::string>({""})});
141141
attrs.insert({"sync_mode", true});
142142
listen_and_serv_op =
143143
f::OpRegistry::CreateOp("listen_and_serv", {{"X", {"x1"}}}, {}, attrs);

python/paddle/fluid/distribute_transpiler.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -185,6 +185,9 @@ def transpile(self,
185185
:param split_method: A function to determine how to split variables
186186
to different servers equally.
187187
:type split_method: function
188+
:type sync_mode: boolean default True
189+
:param sync_mode: if sync_mode is set True, it means that dist transpiler
190+
will transpile the program into sync_mode pserver and trainer program.
188191
"""
189192
assert (callable(split_method))
190193
if program is None:
@@ -479,7 +482,7 @@ def __append_optimize_op__(op, block, grad_to_block_id):
479482
"Fanin": self.trainer_num,
480483
"PrefetchBlock": prefetch_block,
481484
"sync_mode": self.sync_mode,
482-
"grad_to_id": grad_to_block_id
485+
"grad_to_block_id": grad_to_block_id
483486
})
484487

485488
pserver_program.sync_with_cpp()

0 commit comments

Comments
 (0)