diff --git a/python/paddle/v2/fluid/clip.py b/python/paddle/v2/fluid/clip.py
index d1e6987e018859cd1af2ad85f084a55f187861a1..7a36df0dabbca74484f607ec6854a92e613fd951 100644
--- a/python/paddle/v2/fluid/clip.py
+++ b/python/paddle/v2/fluid/clip.py
@@ -134,8 +134,8 @@ class GradientClipByGlobalNorm(BaseGradientClipAttr):
                 "Class 'GradientClipByGlobalNorm' has not been properly initialized. \
 Please call GradientClipByGlobalNorm.init() first.")

-    @classmethod
-    def process_context(cls, context, param, grad):
+    def process_context(self, context, param, grad):
+        cls = self.__class__
         cls.check_init()

         local_norm_var = layers.reduce_sum(
@@ -144,8 +144,8 @@ class GradientClipByGlobalNorm(BaseGradientClipAttr):
             input=[local_norm_var, cls.global_norm_var],
             out=[cls.global_norm_var])

-    @classmethod
-    def create_operators(cls, param, grad):
+    def create_operators(self, param, grad):
+        cls = self.__class__
         cls.check_init()

         if cls.scale_var is None:
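
The diff above turns two classmethods into instance methods that still reach the class-level shared state through `cls = self.__class__`. Below is a minimal standalone sketch of that pattern, not PaddlePaddle code; the class and attribute names (`SharedStateClip`, `global_norm`) are hypothetical and chosen only for illustration.

# Hypothetical, self-contained illustration of the `cls = self.__class__` pattern:
# methods are callable on instances (e.g. through a base-class interface), while
# the accumulated state remains a single class attribute shared by all instances.
class SharedStateClip(object):
    global_norm = 0.0  # class-level state, analogous to global_norm_var

    def process_context(self, grad_norm):
        cls = self.__class__          # reach class attributes from an instance method
        cls.global_norm += grad_norm  # accumulate into the shared class-level value

clip_a = SharedStateClip()
clip_b = SharedStateClip()
clip_a.process_context(3.0)
clip_b.process_context(4.0)
assert SharedStateClip.global_norm == 7.0  # both instances updated the same class state

The design point is that callers can now invoke the method polymorphically on whatever clip-attribute instance a parameter carries, while the running global norm is still accumulated once per class rather than once per instance.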