Unverified commit 60a6d68f authored by Aurelius84, committed by GitHub

remove _optimized_guard in dygraph_mode (#22143)

* remove _optimized_guard in dygraph_mode test=develop

* remove commented code test=develop

* remove list append test=develop

* remove list append test=develop
Parent a2603c5b
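In dygraph (imperative) mode each appended op executes as soon as it is created, so the _optimized_guard context, which in static-graph mode tags appended ops with the parameter/gradient pair they update, and the optimize_ops bookkeeping list do no useful work there. A minimal standalone sketch of the resulting control flow (illustrative only; the helper names below are hypothetical stand-ins, not PaddlePaddle internals):

# sketch.py -- mirrors the patched control flow with mock types.
from contextlib import contextmanager
from types import SimpleNamespace

@contextmanager
def optimized_guard(param_and_grad):
    # Static-graph stand-in: in real Paddle this would tag appended ops
    # with the param/grad they update; in eager mode there is nothing to tag.
    yield

def create_optimization_pass(parameters_and_grads, in_dygraph_mode,
                             append_optimize_op):
    if in_dygraph_mode:
        # Eager path after the patch: no guard, no op list -- each
        # optimize op runs the moment it is appended.
        for param, grad in parameters_and_grads:
            if grad is None:
                continue
            if param.trainable:
                append_optimize_op(param, grad)
    else:
        # Static-graph path is unchanged: keep the guard so the program
        # can attribute the appended ops to their parameter.
        for param, grad in parameters_and_grads:
            if grad is None:
                continue
            with optimized_guard((param, grad)):
                if param.trainable:
                    append_optimize_op(param, grad)

params = [(SimpleNamespace(name="w", trainable=True), "dw"),
          (SimpleNamespace(name="b", trainable=True), None)]
create_optimization_pass(params, in_dygraph_mode=True,
                         append_optimize_op=lambda p, g: print("step on", p.name))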
@@ -441,17 +441,12 @@ class Optimizer(object):
             [p[0] for p in parameters_and_grads if p[0].trainable])
         self._create_global_learning_rate()

-        optimize_ops = []
         if framework.in_dygraph_mode():
             for param_and_grad in parameters_and_grads:
                 if param_and_grad[1] is None:
                     continue
-                with param_and_grad[0].block.program._optimized_guard(
-                        param_and_grad):
-                    if param_and_grad[0].trainable is True:
-                        optimize_op = self._append_optimize_op(target_block,
-                                                               param_and_grad)
-                        optimize_ops.append(optimize_op)
+                if param_and_grad[0].trainable is True:
+                    self._append_optimize_op(target_block, param_and_grad)
         else:
             for param_and_grad in parameters_and_grads:
                 if param_and_grad[1] is None:
@@ -459,9 +454,7 @@ class Optimizer(object):
                 with param_and_grad[0].block.program._optimized_guard(
                         param_and_grad), name_scope("optimizer"):
                     if param_and_grad[0].trainable is True:
-                        optimize_op = self._append_optimize_op(target_block,
-                                                               param_and_grad)
-                        optimize_ops.append(optimize_op)
+                        self._append_optimize_op(target_block, param_and_grad)

         # Get custom finish ops for subclasses
         # FIXME: Need to fix this once we figure out how to handle dependencies
...
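For reference, the in_dygraph_mode() branch patched above is the one exercised when minimize() is called under the imperative guard. A minimal usage sketch, assuming the Paddle 1.7-era fluid API that this commit targets (the exact layer and optimizer signatures are an assumption of that release):

import numpy as np
import paddle.fluid as fluid

with fluid.dygraph.guard():
    linear = fluid.dygraph.Linear(3, 1)
    # In dygraph mode the optimizer takes an explicit parameter list.
    sgd = fluid.optimizer.SGDOptimizer(learning_rate=0.01,
                                       parameter_list=linear.parameters())
    x = fluid.dygraph.to_variable(np.random.rand(4, 3).astype("float32"))
    loss = fluid.layers.reduce_mean(linear(x))
    loss.backward()
    # minimize() reaches the eager branch above: no _optimized_guard,
    # and each optimize op executes immediately.
    sgd.minimize(loss)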