Skip to content

Commit 9142213

Browse files
authored
Merge pull request #13104 from jacquesqiao/fix-sparse-grad-merge
fix sparse grad merge on pserver
2 parents 6d39ada + ed511fe commit 9142213

File tree

2 files changed

+6
-8
lines changed

2 files changed

+6
-8
lines changed

python/paddle/fluid/tests/unittests/test_dist_transpiler.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -438,7 +438,7 @@ def transpiler_test_impl(self):
438438
# 2 optimize for table adam
439439
# NOTE: if param is not selected rows, the grad will be scaled to grad / trainer_num
440440
self.assertEqual([op.type for op in pserver1.blocks[2].ops],
441-
["sum", "adam", "scale", "scale"])
441+
["sum", "scale", "adam", "scale", "scale"])
442442

443443
trainer, _ = self.get_trainer()
444444
self.assertEqual(len(trainer.blocks), 1)

python/paddle/fluid/transpiler/distribute_transpiler.py

Lines changed: 5 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1390,13 +1390,11 @@ def _append_pserver_grad_merge_ops(self, optimize_block,
13901390
inputs={"X": vars2merge},
13911391
outputs={"Out": merged_var},
13921392
attrs={"use_mkldnn": False})
1393-
# TODO(panyx0718): What if it's SELECTED_ROWS.
1394-
if not merged_var.type == core.VarDesc.VarType.SELECTED_ROWS:
1395-
optimize_block.append_op(
1396-
type="scale",
1397-
inputs={"X": merged_var},
1398-
outputs={"Out": merged_var},
1399-
attrs={"scale": 1.0 / float(self.trainer_num)})
1393+
optimize_block.append_op(
1394+
type="scale",
1395+
inputs={"X": merged_var},
1396+
outputs={"Out": merged_var},
1397+
attrs={"scale": 1.0 / float(self.trainer_num)})
14001398
return merged_var
14011399

14021400
def _append_pserver_ops(self, optimize_block, opt_op, endpoint,

0 commit comments

Comments
 (0)