Unverified commit 91422138, authored by Qiao Longfei, committed via GitHub

Merge pull request #13104 from jacquesqiao/fix-sparse-grad-merge

fix sparse grad merge on pserver
@@ -438,7 +438,7 @@ class TestLocalLookupTable(TestDistLookupTableBase):
         # 2 optimize for table adam
         # NOTE: if param is not selected rows, the grad will scaled to grad / trainer_num
         self.assertEqual([op.type for op in pserver1.blocks[2].ops],
-                         ["sum", "adam", "scale", "scale"])
+                         ["sum", "scale", "adam", "scale", "scale"])
         trainer, _ = self.get_trainer()
         self.assertEqual(len(trainer.blocks), 1)
...
@@ -1390,13 +1390,11 @@ class DistributeTranspiler(object):
             inputs={"X": vars2merge},
             outputs={"Out": merged_var},
             attrs={"use_mkldnn": False})
-        # TODO(panyx0718): What if it's SELECTED_ROWS.
-        if not merged_var.type == core.VarDesc.VarType.SELECTED_ROWS:
-            optimize_block.append_op(
-                type="scale",
-                inputs={"X": merged_var},
-                outputs={"Out": merged_var},
-                attrs={"scale": 1.0 / float(self.trainer_num)})
+        optimize_block.append_op(
+            type="scale",
+            inputs={"X": merged_var},
+            outputs={"Out": merged_var},
+            attrs={"scale": 1.0 / float(self.trainer_num)})
         return merged_var

     def _append_pserver_ops(self, optimize_block, opt_op, endpoint,
...
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册