Commit d23ea4ef authored by fengjiayi

add gradient clip by norm

Parent 03c858a9
@@ -77,6 +77,18 @@ class GradientClipByValue(BaseGradientClipAttr):
         return param, new_grad
 
 
+class GradientClipByNorm(BaseGradientClipAttr):
+    def __init__(self, clip_norm):
+        self.clip_norm = clip_norm
+
+    def process_context(self, context, p_g):
+        pass
+
+    def create_operators(self, param, grad):
+        new_grad = layers.clip_by_norm(x=grad, max_norm=self.clip_norm)
+        return param, new_grad
+
+
 def append_gradient_clip_ops(param_grad):
     context = dict()
     create_op_callbacks = []
...
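For reference, the clip-by-norm transformation that the new class delegates to layers.clip_by_norm rescales a gradient whose L2 norm exceeds clip_norm down to exactly clip_norm, and leaves smaller gradients untouched: out = g * min(1, clip_norm / ||g||). A minimal NumPy sketch of these semantics (an illustration only, not the operator's actual implementation):

import numpy as np

def clip_by_norm(x, max_norm):
    # Rescale x so that ||x||_2 <= max_norm; x is returned
    # unchanged when its norm is already within the bound.
    norm = np.linalg.norm(x)
    if norm > max_norm:
        x = x * (max_norm / norm)
    return x

grad = np.array([3.0, 4.0])        # ||grad||_2 = 5.0
clipped = clip_by_norm(grad, 1.0)  # -> [0.6, 0.8], norm exactly 1.0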
@@ -16,6 +16,7 @@ __all__ = [
     'elementwise_sub',
     'elementwise_mul',
     'clip',
+    'clip_by_norm',
     'sequence_softmax',
 ] + __activations__
...
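To make the new attribute's contract concrete, here is a hedged sketch of how calling code such as append_gradient_clip_ops might drive it, using only the two methods visible in this diff; param and grad are assumed to be framework Variables already in scope:

clip_attr = GradientClipByNorm(clip_norm=5.0)

# process_context is a no-op for per-parameter norm clipping: unlike
# global-norm clipping, no state has to be accumulated across the
# whole (param, grad) list before operators can be created.
clip_attr.process_context(context=dict(), p_g=[(param, grad)])

# create_operators appends a clip_by_norm op to the program and
# returns the (param, clipped_grad) pair that replaces the original
# gradient in the optimization step.
param, clipped_grad = clip_attr.create_operators(param, grad)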