# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Layer normalization module."""
import paddle
from paddle import nn


class LayerNorm(nn.LayerNorm):
    """Layer normalization module.
21 22 23
    Args:
        nout (int): Output dim size.
        dim (int): Dimension to be normalized.
    """

    def __init__(self, nout, dim=-1):
        """Construct an LayerNorm object."""
        super().__init__(nout)
        self.dim = dim

    def forward(self, x):
        """Apply layer normalization.

34 35
        Args:
            x (Tensor):Input tensor.

        Returns:
            Tensor: Normalized tensor.
        """

        if self.dim == -1:
            return super().forward(x)
        else:
            len_dim = len(x.shape)
            # Resolve a negative axis to its positive index in a local
            # variable instead of mutating self.dim, so later calls with
            # tensors of a different rank are not affected.
            dim = self.dim if self.dim >= 0 else len_dim + self.dim
            assert dim >= 0

            # Build a permutation that swaps `dim` with the last axis; the
            # swap is its own inverse, so the same permutation restores the
            # original layout afterwards.
            new_perm = list(range(len_dim))
            # A Python-style tuple swap is not supported when converting
            # dygraph to static graph:
            # new_perm[dim], new_perm[len_dim - 1] = new_perm[len_dim - 1], new_perm[dim]
            # so swap the items one assignment at a time (C++ style) instead.
            temp = new_perm[dim]
            new_perm[dim] = new_perm[len_dim - 1]
            new_perm[len_dim - 1] = temp

            # Transpose the target axis to the end, normalize, transpose back.
            return paddle.transpose(
                super().forward(paddle.transpose(x, new_perm)), new_perm)
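

# The demo below is an illustrative sketch, not part of the original module.
# It assumes Paddle's default LayerNorm parameter init (weight ones, bias
# zeros) and shows that dim=1 normalizes the channel axis of a
# (batch, channel, time) tensor, matching a transpose-then-normalize-last-axis
# reference. The shapes and variable names here are invented for the example.
if __name__ == "__main__":
    x = paddle.randn([2, 8, 5])  # (batch, channel, time)
    ln = LayerNorm(8, dim=1)  # normalize over the 8-dim channel axis
    y = ln(x)

    # Reference: move channels to the last axis and use the default dim=-1.
    ref = LayerNorm(8)(paddle.transpose(x, [0, 2, 1]))
    print(paddle.allclose(y, paddle.transpose(ref, [0, 2, 1])).item())  # True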