未验证 提交 04e9d721 编写于 作者: H hong 提交者: GitHub

unify name in optimizer; test=develop (#24008)

上级 fb82d72c
......@@ -68,16 +68,13 @@ class Optimizer(object):
grad_clip=None,
name=None):
self._parameter_list = parameter_list
self._name = name
if framework.in_dygraph_mode():
if not isinstance(learning_rate, float) and \
not isinstance(learning_rate, LearningRateDecay):
raise TypeError(
"learning rate should be float or LearningRateDecay, got %s here"
% type(learning_rate))
if name is not None:
self._name = unique_name.generate(name)
else:
self._name = unique_name.generate(self.__class__.__name__)
if self._parameter_list is None:
raise AttributeError(
"parameter_list argument given to the Optimizer should not be None in dygraph mode."
......@@ -96,7 +93,6 @@ class Optimizer(object):
raise TypeError(
"learning rate should be float or Variable, got %s here" %
type(learning_rate))
self._name = name
if grad_clip is not None:
if not isinstance(grad_clip, GradientClipBase):
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册