Skip to content

Commit 2aad01f

Browse files
authored
Merge pull request #13132 from jacquesqiao/cherry-pick-pserver-merge-selected-rows
Cherry pick pserver merge selected rows
2 parents 43ee997 + bfa230c commit 2aad01f

File tree

2 files changed

+6
-8
lines changed

2 files changed

+6
-8
lines changed

python/paddle/fluid/tests/unittests/test_dist_transpiler.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -440,7 +440,7 @@ def transpiler_test_impl(self):
440440
# 2 optimize for table adam
441441
# NOTE: if param is not selected rows, the grad will scaled to grad / trainer_num
442442
self.assertEqual([op.type for op in pserver1.blocks[2].ops],
443-
["sum", "adam", "scale", "scale"])
443+
["sum", "scale", "adam", "scale", "scale"])
444444

445445
trainer, _ = self.get_trainer()
446446
self.assertEqual(len(trainer.blocks), 1)

python/paddle/fluid/transpiler/distribute_transpiler.py

Lines changed: 5 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1287,13 +1287,11 @@ def _append_pserver_grad_merge_ops(self, optimize_block,
12871287
inputs={"X": vars2merge},
12881288
outputs={"Out": merged_var},
12891289
attrs={"use_mkldnn": False})
1290-
# TODO(panyx0718): What if it's SELECTED_ROWS.
1291-
if not merged_var.type == core.VarDesc.VarType.SELECTED_ROWS:
1292-
optimize_block.append_op(
1293-
type="scale",
1294-
inputs={"X": merged_var},
1295-
outputs={"Out": merged_var},
1296-
attrs={"scale": 1.0 / float(self.trainer_num)})
1290+
optimize_block.append_op(
1291+
type="scale",
1292+
inputs={"X": merged_var},
1293+
outputs={"Out": merged_var},
1294+
attrs={"scale": 1.0 / float(self.trainer_num)})
12971295
return merged_var
12981296

12991297
def _append_pserver_ops(self, optimize_block, opt_op, endpoint,

0 commit comments

Comments (0)