Commit 773f2f73 authored by fengjiayi

fix errors

Parent 51985aa2
@@ -138,8 +138,7 @@ class GradientClipByGlobalNorm(BaseGradientClipAttr):
         cls = self.__class__
         cls.check_init()
-        local_norm_var = layers.reduce_sum(
-            x=layers.pow(x=grad, factor=2), reduce_all=True)
+        local_norm_var = layers.reduce_sum(input=layers.pow(x=grad, factor=2.0))
         layers.sums(
             input=[local_norm_var, cls.global_norm_var],
             out=[cls.global_norm_var])
@@ -154,6 +153,8 @@ class GradientClipByGlobalNorm(BaseGradientClipAttr):
             x=cls.clip_norm_var,
             y=layers.elementwise_max(
                 x=cls.clip_norm_var, y=cls.global_norm_var))
+        assert cls.scale_var.shape == (1L, )
+
         new_grad = layers.elementwise_mul(x=grad, y=cls.scale_var)
         return param, new_grad
......
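Taken together, the two hunks above implement gradient clipping by global norm: every gradient contributes reduce_sum(pow(grad, 2.0)) to a shared global_norm_var, and each gradient is then multiplied by scale_var = clip_norm / max(clip_norm, global_norm). Below is a minimal NumPy sketch of that logic, assuming the collapsed lines take the square root of the accumulated sum; the function and variable names are illustrative, not fluid's API.

import numpy as np

def clip_by_global_norm(grads, clip_norm):
    # Accumulate squared entries across all gradients, mirroring how each
    # local_norm_var is summed into global_norm_var in the diff above.
    global_norm = np.sqrt(sum(np.sum(np.square(g)) for g in grads))
    # scale = clip_norm / max(clip_norm, global_norm): a no-op when the
    # gradients already fit within clip_norm, a proportional shrink otherwise.
    scale = clip_norm / max(clip_norm, global_norm)
    return [g * scale for g in grads]

grads = [np.array([3.0, 4.0]), np.array([12.0])]  # global norm = 13.0
clipped = clip_by_global_norm(grads, clip_norm=5.0)
print(np.sqrt(sum(np.sum(np.square(g)) for g in clipped)))  # prints 5.0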
@@ -48,7 +48,7 @@ __all__ = [
     'mean', 'mul', 'reshape', 'scale', 'transpose',
     'sigmoid_cross_entropy_with_logits', 'elementwise_add', 'elementwise_div',
     'elementwise_sub', 'elementwise_mul', 'elementwise_max', 'elementwise_min',
-    'clip', 'clip_by_norm', 'sequence_softmax', 'reduce_sum'
+    'clip', 'clip_by_norm', 'sequence_softmax'
 ] + __activations__

 for _OP in set(__all__):
......
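On the second file: ops.py auto-generates a Python layer function for every name in __all__, so keeping 'reduce_sum' in this list would presumably create a generated wrapper that clashes with a hand-written reduce_sum layer defined elsewhere; dropping it keeps each layer defined exactly once. A rough sketch of that registration pattern follows; _make_layer_fn and its echoing body are illustrative assumptions, not fluid's actual internals.

# Stand-in op list for the trimmed __all__ above.
_OPS = ['mean', 'mul', 'scale']

def _make_layer_fn(op_name):
    # Build a module-level function for one operator name. Real code would
    # append an operator to the current program; this stub just records the call.
    def layer_fn(**kwargs):
        return (op_name, kwargs)
    layer_fn.__name__ = op_name
    return layer_fn

for _OP in set(_OPS):
    globals()[_OP] = _make_layer_fn(_OP)

print(mean(x='some_var'))  # -> ('mean', {'x': 'some_var'})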