
Commit 5338417

Polish code style
1 parent ae39709 commit 5338417
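
The change is purely stylistic, as the hunks below show: in the C++ files the reference qualifier `&` is moved so that it attaches to the declared name rather than the type (matching the surrounding code), and in the Python files multi-line call sites are collapsed onto fewer lines. A minimal before/after sketch of the C++ convention, taken from the first hunk:

    const proto::OpProto::Attr& attr = GetProtoAttr(name);  // before: & attached to the type
    const proto::OpProto::Attr &attr = GetProtoAttr(name);  // after: & attached to the name

The Python reflow is consistent with what an automatic formatter such as yapf would produce, though the commit message does not say whether a tool was used.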

5 files changed: +10 −13 lines


paddle/fluid/framework/op_desc.cc

Lines changed: 3 additions & 3 deletions
@@ -209,7 +209,7 @@ void OpDesc::SetAttr(const std::string &name, const Attribute &v) {
   if (attr_type == proto::AttrType::INTS &&
       boost::get<std::vector<int>>(v).size() == 0u) {
     // Find current attr via attr name and set the correct attribute value
-    const proto::OpProto::Attr& attr = GetProtoAttr(name);
+    const proto::OpProto::Attr &attr = GetProtoAttr(name);
     switch (attr.type()) {
       case proto::AttrType::BOOLEANS: {
         VLOG(11) << "SetAttr: " << Type() << ", " << name
@@ -275,8 +275,8 @@ Attribute OpDesc::GetAttr(const std::string &name) const {
   return it->second;
 }
 
-const proto::OpProto::Attr& OpDesc::GetProtoAttr(const std::string &name) {
-  proto::OpProto& proto = OpInfoMap::Instance().Get(Type()).Proto();
+const proto::OpProto::Attr &OpDesc::GetProtoAttr(const std::string &name) {
+  proto::OpProto &proto = OpInfoMap::Instance().Get(Type()).Proto();
   for (int i = 0; i != proto.attrs_size(); ++i) {
     const proto::OpProto::Attr &attr = proto.attrs(i);
     if (attr.name() == name) {

paddle/fluid/framework/op_desc.h

Lines changed: 1 addition & 1 deletion
@@ -81,7 +81,7 @@ class OpDesc {
 
   Attribute GetAttr(const std::string &name) const;
 
-  const proto::OpProto::Attr& GetProtoAttr(const std::string &name) const;
+  const proto::OpProto::Attr &GetProtoAttr(const std::string &name) const;
 
   Attribute GetNullableAttr(const std::string &name) const;
 

python/paddle/fluid/backward.py

Lines changed: 1 addition & 2 deletions
@@ -364,8 +364,7 @@ def _append_backward_ops_(block,
 
         # Getting op's corresponding grad_op
         grad_op_desc, op_grad_to_var = core.get_grad_op_desc(
-            op.desc,
-            cpt.to_text(no_grad_dict[block.idx]), grad_sub_block_list)
+            op.desc, cpt.to_text(no_grad_dict[block.idx]), grad_sub_block_list)
 
         grad_op_descs.extend(grad_op_desc)
         grad_to_var.update(op_grad_to_var)

python/paddle/fluid/parallel_executor.py

Lines changed: 2 additions & 4 deletions
@@ -159,8 +159,7 @@ def __init__(self,
             for p in main.global_block().iter_parameters()
             if not p.stop_gradient
         ]),
-            set(cpt.to_text(var)
-                for var in self.persistable_vars), main.desc,
+            set(cpt.to_text(var) for var in self.persistable_vars), main.desc,
             cpt.to_text(loss_name)
             if loss_name else six.u(''), scope, local_scopes, exec_strategy,
             build_strategy, num_trainers, trainer_id)
@@ -274,8 +273,7 @@ def run(self, fetch_list, feed=None, feed_dict=None, return_numpy=True):
             self.executor.feed_tensors_into_local_scopes(res)
 
         fetch_var_name = '@FETCHED_VAR_NAME@'
-        self.executor.run(
-            cpt.to_text(fetch_list), cpt.to_text(fetch_var_name))
+        self.executor.run(cpt.to_text(fetch_list), cpt.to_text(fetch_var_name))
         arr = self.scope.find_var(fetch_var_name).get_lod_tensor_array()
 
         if self.is_dist:

python/paddle/fluid/transpiler/memory_optimization_transpiler.py

Lines changed: 3 additions & 3 deletions
@@ -259,9 +259,9 @@ def compare_shape(x_shape, cache_shape, opt_level):
                         # Rename the var to the cache var already with
                         # memory allocated in order to reuse the memory.
                         _rename_arg_(self._ops, x, cache_var, begin_idx=i)
-                        self._program.block(block_desc.id).var(
-                            cpt.to_text(x)).desc = self._find_var(
-                                block_desc, cache_var, is_forward)
+                        self._program.block(block_desc.id).var(cpt.to_text(
+                            x)).desc = self._find_var(block_desc, cache_var,
+                                                      is_forward)
                         self._update_graph(x, cache_var, begin_idx=i)
                         break
 
267267
