Commit 224e5fcc authored by: W wangyanfei01

fix bug:

 * gradient_clipping_threshold should be allowed to be set at parameter granularity
Parent 9e9ae2f3
...@@ -19,34 +19,34 @@ __all__ = [
def convert_and_compare(x, Type):
    """
    Convert x to be the same type as Type and then convert back to
    check whether there is a loss of information
    :param x: object to be checked
    :param Type: target type to check x over
    """
    return type(x)(Type(x)) == x


def is_compatible_with(x, Type):
    """
    Check if x has a type compatible with Type
    :param x: object to be checked
    :param Type: target type to check x over
    """
    if type(x) == Type:
        return True
    try:
        if float == Type or int == Type:
            # avoid those types that can be converted to float/int but not very
            # meaningful and could potentially lead to error
            # i.e., str and bool typed value should not be used for initializing float/int variable
            if not isinstance(x, str) and not isinstance(x, bool):
                return convert_and_compare(x, Type)
        elif bool == Type:
            # should not use string type to initialize bool variable
            if not isinstance(x, str):
                return convert_and_compare(x, Type)
        else:
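
To illustrate how these checks behave, here is a minimal sketch (the concrete values are illustrative only and assume both helpers above are in scope):

# Round-tripping a value through the target type detects lossy conversions.
assert is_compatible_with(3, float)        # int(float(3)) == 3, no information loss
assert not is_compatible_with(0.5, int)    # float(int(0.5)) == 0.0 != 0.5, lossy
assert is_compatible_with(True, bool)      # exact type match returns True directly
assert not is_compatible_with("3", float)  # str is not accepted for float targets
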
...@@ -88,6 +88,10 @@ class ParameterAttribute(object):
    :type learning_rate: float or None
    :param momentum: The parameter momentum. None means use global value.
    :type momentum: float or None
    :param gradient_clipping_threshold: gradient clipping threshold. Gradient
                                        values larger than this threshold will
                                        be clipped.
    :type gradient_clipping_threshold: float
    :param sparse_update: Enable sparse update for this parameter. It will
                          enable both local and remote sparse update.
    :type sparse_update: bool
...@@ -104,6 +108,7 @@ class ParameterAttribute(object):
                 l2_rate=None,
                 learning_rate=None,
                 momentum=None,
                 gradient_clipping_threshold=None,
                 sparse_update=False):
        # initialize strategy.
        if is_static:
...@@ -152,6 +157,11 @@ class ParameterAttribute(object):
            self.attr['sparse_update'] = True
            self.attr['sparse_remote_update'] = True

        if gradient_clipping_threshold is not None and \
                is_compatible_with(gradient_clipping_threshold, float):
            self.attr['gradient_clipping_threshold'] = \
                gradient_clipping_threshold

    def set_default_parameter_name(self, name):
        """
        Set default parameter name. If parameter not set, then will use default
...
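
A minimal usage sketch of the new per-parameter setting (the layer and import names follow PaddlePaddle's v1 trainer_config_helpers API and are assumptions, not part of this diff):

from paddle.trainer_config_helpers import data_layer, fc_layer, ParameterAttribute

data = data_layer(name='input', size=784)

# Clip this weight's gradients at 10.0 for this parameter only, rather than
# relying on a single global gradient_clipping_threshold.
hidden = fc_layer(
    input=data,
    size=128,
    param_attr=ParameterAttribute(gradient_clipping_threshold=10.0))
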