From a247972ddad05490a7b72911521bff0b48cf2d1c Mon Sep 17 00:00:00 2001
From: fengjiayi
Date: Wed, 17 Jan 2018 20:31:05 +0800
Subject: [PATCH] fix a error

---
 python/paddle/v2/fluid/clip.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/python/paddle/v2/fluid/clip.py b/python/paddle/v2/fluid/clip.py
index d1e6987e0..7a36df0da 100644
--- a/python/paddle/v2/fluid/clip.py
+++ b/python/paddle/v2/fluid/clip.py
@@ -134,8 +134,8 @@ class GradientClipByGlobalNorm(BaseGradientClipAttr):
                 "Class 'GradientClipByGlobalNorm' has not been properly initialized. \
 Please call GradientClipByGlobalNorm.init() first.")
 
-    @classmethod
-    def process_context(cls, context, param, grad):
+    def process_context(self, context, param, grad):
+        cls = self.__class__
         cls.check_init()
 
         local_norm_var = layers.reduce_sum(
@@ -144,8 +144,8 @@ class GradientClipByGlobalNorm(BaseGradientClipAttr):
             input=[local_norm_var, cls.global_norm_var],
             out=[cls.global_norm_var])
 
-    @classmethod
-    def create_operators(cls, param, grad):
+    def create_operators(self, param, grad):
+        cls = self.__class__
         cls.check_init()
 
         if cls.scale_var is None:
-- 
GitLab
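
Note on the pattern this patch applies: `process_context` and `create_operators` stop being `@classmethod`s and become instance methods, while `cls = self.__class__` keeps the existing class-level state (the shared global-norm accumulator) untouched, so a `GradientClipByGlobalNorm` object can be attached per parameter like any other `BaseGradientClipAttr` instance. The standalone sketch below imitates that structure in plain Python; the attribute names, `init()` signature, and toy norm arithmetic are illustrative assumptions, not Paddle's actual implementation.

    # Minimal sketch (not the Paddle API) of instance methods that operate
    # on class-level state via self.__class__, as in the patch above.
    class GradientClipByGlobalNormSketch(object):
        global_norm = None  # class-level accumulator shared by all instances
        clip_norm = None

        @classmethod
        def init(cls, clip_norm):
            cls.clip_norm = float(clip_norm)
            cls.global_norm = 0.0

        @classmethod
        def check_init(cls):
            if cls.clip_norm is None or cls.global_norm is None:
                raise ValueError(
                    "Class 'GradientClipByGlobalNormSketch' has not been "
                    "properly initialized. Please call init() first.")

        def process_context(self, context, param, grad):
            # Instance method, but the accumulation targets the class, so every
            # parameter's gradient contributes to one shared global norm.
            cls = self.__class__
            cls.check_init()
            cls.global_norm += sum(g * g for g in grad)

        def create_operators(self, param, grad):
            # Scale the gradient by clip_norm / max(clip_norm, global_norm).
            cls = self.__class__
            cls.check_init()
            scale = cls.clip_norm / max(cls.clip_norm, cls.global_norm ** 0.5)
            return [g * scale for g in grad]

    # Usage: one instance per parameter, state accumulated on the class.
    GradientClipByGlobalNormSketch.init(clip_norm=1.0)
    attr = GradientClipByGlobalNormSketch()
    attr.process_context(context={}, param="w", grad=[0.3, 0.4])
    print(attr.create_operators(param="w", grad=[0.3, 0.4]))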