From 29f9935d924eabee2d48b6ca5e52a9c4908bbf08 Mon Sep 17 00:00:00 2001
From: Megvii Engine Team
Date: Tue, 12 Oct 2021 14:44:46 +0800
Subject: [PATCH] fix(imperative/python): add layer_norm doc and rm useless param

GitOrigin-RevId: 1b15db621ec12c60c6c59f3b3e983c9f81907079
---
 imperative/python/megengine/functional/nn.py | 16 +++++++++++-----
 1 file changed, 11 insertions(+), 5 deletions(-)

diff --git a/imperative/python/megengine/functional/nn.py b/imperative/python/megengine/functional/nn.py
index 5324067cf..e741bdf81 100644
--- a/imperative/python/megengine/functional/nn.py
+++ b/imperative/python/megengine/functional/nn.py
@@ -1084,12 +1084,18 @@ def layer_norm(
     weight: Optional[Tensor] = None,
     bias: Optional[Tensor] = None,
     eps: float = 1e-5,
-    eps_mode="additive",
 ):
-
-    assert eps_mode.lower() in {"max", "additive"}, "unknown eps_mode: {}".format(
-        eps_mode
-    )
+    r"""Applies layer normalization to the input. Supports tensors of any shape as input.
+    Reference: https://arxiv.org/pdf/1803.08494.pdf.
+
+    Args:
+        inp: input tensor.
+        normalized_shape: the shape over which the input is normalized.
+        affine: whether to use learnable weight and bias.
+        weight: must not be None when affine is True.
+        bias: must not be None when affine is True.
+        eps: a value added to the denominator for numerical stability. Default: 1e-5
+    """
     if amp._enabled:
         inp, weight, bias = cast_tensors(inp, weight, bias, promote=True)
-- 
GitLab
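
For reference, a minimal usage sketch of the patched signature (eps_mode removed, parameters as documented in the new docstring). The import path follows the file touched by this patch; the tensor shapes and the normalized_shape value are illustrative assumptions, not part of the commit.

import numpy as np
import megengine as mge
from megengine.functional.nn import layer_norm

# Input of shape (batch, channels, features); shape chosen only for illustration.
x = mge.tensor(np.random.randn(2, 3, 16).astype("float32"))

# Normalize over the last dimension without learnable affine parameters.
out = layer_norm(x, normalized_shape=(16,), affine=False)

# With affine=True, weight and bias matching normalized_shape must be provided,
# per the new docstring ("must not be None when affine is True").
w = mge.tensor(np.ones((16,), dtype="float32"))
b = mge.tensor(np.zeros((16,), dtype="float32"))
out_affine = layer_norm(x, normalized_shape=(16,), affine=True, weight=w, bias=b)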