Commit 538f1ad2 authored by F fengjiayi

tiny fix

Parent 408a6b8b
...@@ -124,11 +124,7 @@ class GradientClipByGlobalNorm(BaseGradientClipAttr):
         cls.global_norm_var = layers.fill_constant(
             shape=[1], dtype="float32", value=0.0)
-        cls.local_norm_var = framework.default_main_program().block(
-            0).create_var(
-                name=framework.unique_name("local_norm"),
-                dtype="float32",
-                persistable=False)
+        cls.local_norm_var = layers.create_tensor(dtype="float32")
         cls.clip_norm_var = layers.fill_constant(
             shape=[1], dtype="float32", value=clip_norm)
......
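
For context, these variables belong to gradient clipping by global norm: the local norm of each gradient is accumulated into a single global norm, and all gradients are rescaled by the same factor when that norm exceeds clip_norm. Below is a minimal NumPy sketch of that computation for illustration only; the function name and toy values are assumptions and this is not PaddlePaddle's actual implementation.

import numpy as np

def clip_by_global_norm(grads, clip_norm):
    # global norm = sqrt(sum of squared L2 norms over all gradients)
    global_norm = np.sqrt(sum(np.sum(np.square(g)) for g in grads))
    # shrink every gradient by the same factor when the global norm exceeds clip_norm
    scale = clip_norm / max(global_norm, clip_norm)
    return [g * scale for g in grads]

# usage: two toy gradients whose global norm is 5.0, clipped down to 1.0
grads = [np.array([3.0, 0.0]), np.array([0.0, 4.0])]
clipped = clip_by_global_norm(grads, clip_norm=1.0)  # -> [0.6, 0.0], [0.0, 0.8]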