Commit 1c19f1ab, authored by: yuyang18

Do not change the API in a documentation-only PR

Parent commit: 7747e01b
......@@ -215,7 +215,7 @@ def set_gradient_clip(clip, param_list=None, program=None):
def append_gradient_clip_ops(param_grad):
context = dict()
for p, g in param_grad:
with p.block.program.optimization_guard(p):
with p.block.program.optimized_guard(p):
clip_attr = getattr(p, 'gradient_clip_attr', NullGradientClipAttr())
if clip_attr is None:
clip_attr = NullGradientClipAttr()
......@@ -228,7 +228,7 @@ def append_gradient_clip_ops(param_grad):
res = []
for p, g in param_grad:
with p.block.program.optimization_guard(p):
with p.block.program.optimized_guard(p):
res.append(clip_attr.create_operators(param=p, grad=g))
return res
......
......@@ -1103,7 +1103,7 @@ class Program(object):
self._op_role_var = [var_name]
@contextlib.contextmanager
def optimization_guard(self, var):
def optimized_guard(self, var):
"""
A with guard to set :code:`Optimization` :code:`OpRole` and
:code:`OpRoleVar` automatically.
......@@ -1116,7 +1116,7 @@ class Program(object):
Examples:
>>> p, g = backward(...)
>>> with program.optimization_guard(p):
>>> with program.optimized_guard(p):
>>> p = p - 0.001 * g
"""
OpRole = core.op_proto_and_checker_maker.OpRole
......
......@@ -226,7 +226,7 @@ class Optimizer(object):
optimize_ops = []
for param_and_grad in parameters_and_grads:
with param_and_grad[0].block.program.optimization_guard(
with param_and_grad[0].block.program.optimized_guard(
param_and_grad[0]):
if param_and_grad[0].trainable is True and param_and_grad[
1] is not None:
......
......@@ -43,7 +43,7 @@ def append_regularization_ops(parameters_and_grads, regularization=None):
"""
params_and_grads = []
for param, grad in parameters_and_grads:
with param.block.program.optimization_guard(param):
with param.block.program.optimized_guard(param):
# If no gradient then we don't need to do anything
if grad is None:
params_and_grads.append((param, grad))
......
Markdown is supported.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
To comment, please register.