Commit d8220ccb authored by qiaolongfei

add optimized_guard for optimizer finish_update

Parent d99775a7
@@ -570,6 +570,7 @@ class AdamOptimizer(Optimizer):
         assert isinstance(block, framework.Block)
         main_block = block.program.global_block()
         for param in parameters:
+            with param.block.program.optimized_guard(param):
             beta1_pow_acc = self._get_accumulator(self._beta1_pow_acc_str,
                                                   param)
             beta2_pow_acc = self._get_accumulator(self._beta2_pow_acc_str,
@@ -696,6 +697,7 @@ class AdamaxOptimizer(Optimizer):
         assert isinstance(block, framework.Block)
         main_block = block.program.global_block()
         for param in parameters:
+            with param.block.program.optimized_guard(param):
             beta1_pow_acc = self._get_accumulator(self._beta1_pow_acc_str,
                                                   param)
             main_block.append_op(
......
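For context, the pattern this commit introduces can be sketched as below. This is a minimal reconstruction of the Adamax `_finish_update` method after the change, not the verbatim PaddlePaddle source: the `scale` op, its attributes, and the completion of the truncated `append_op` call follow the surrounding fluid code of that era and should be treated as assumptions.

# Minimal sketch (reconstruction, not verbatim source) of a method on
# AdamaxOptimizer; it assumes the fluid `framework` module and the
# optimizer's existing `_get_accumulator` helper are in scope.
# optimized_guard is a context manager on fluid's Program: ops appended
# inside it are tagged as belonging to the optimization of `param`, so
# later passes (e.g. the distributed transpiler) can keep a parameter's
# update ops grouped together.
def _finish_update(self, block, parameters):
    """Append the per-step update of the beta1 power accumulator."""
    assert isinstance(block, framework.Block)
    main_block = block.program.global_block()
    for param in parameters:
        with param.block.program.optimized_guard(param):
            beta1_pow_acc = self._get_accumulator(self._beta1_pow_acc_str,
                                                  param)
            # beta1_pow_acc *= beta1, expressed as an in-place scale op
            main_block.append_op(
                type="scale",
                inputs={"X": beta1_pow_acc},
                outputs={"Out": beta1_pow_acc},
                attrs={"scale": self._beta1})

The change itself is one line per optimizer: the loop body that appends accumulator-update ops is wrapped in optimized_guard(param), so every op created for a parameter's finish_update step carries that parameter's tag.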