import math

import paddle.fluid as fluid
import paddle.fluid.dygraph as dg
import paddle.fluid.layers as layers
from parakeet.modules.customized import Conv1D
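
# Position-wise feed-forward block: two Conv1D layers with a ReLU in between,
# followed by dropout, a residual connection, and layer normalization. The
# same transformation is applied at every time step of the (B, T, C) input.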


class PositionwiseFeedForward(dg.Layer):
    """A two-layer position-wise feed-forward network."""

    def __init__(self,
                 d_in,
                 num_hidden,
                 filter_size,
                 padding=0,
                 use_cudnn=True,
                 dropout=0.1):
        super(PositionwiseFeedForward, self).__init__()
        self.num_hidden = num_hidden
        self.use_cudnn = use_cudnn
        self.dropout = dropout

        # Bias bound k = 1/sqrt(fan_in), the conventional uniform bias init.
        k = math.sqrt(1.0 / d_in)
        # First conv expands the channel dimension from d_in to num_hidden.
        self.w_1 = Conv1D(
            num_channels=d_in,
            num_filters=num_hidden,
            filter_size=filter_size,
            padding=padding,
            param_attr=fluid.ParamAttr(
                initializer=fluid.initializer.XavierInitializer()),
            bias_attr=fluid.ParamAttr(
                initializer=fluid.initializer.Uniform(low=-k, high=k)),
            use_cudnn=use_cudnn)

        k = math.sqrt(1.0 / num_hidden)
        # Second conv projects the channel dimension back to d_in.
        self.w_2 = Conv1D(
            num_channels=num_hidden,
            num_filters=d_in,
            filter_size=filter_size,
            padding=padding,
            param_attr=fluid.ParamAttr(
                initializer=fluid.initializer.XavierInitializer()),
            bias_attr=fluid.ParamAttr(
                initializer=fluid.initializer.Uniform(low=-k, high=k)),
            use_cudnn=use_cudnn)
        # Layer normalization over the channel (feature) dimension.
        self.layer_norm = dg.LayerNorm(d_in)

    def forward(self, input):
        """
        Compute the feed-forward network output.

        Args:
            input (Variable): shape (B, T, C), dtype float32, the input value.

        Returns:
            output (Variable): shape (B, T, C), the result of the FFN.
        """
        # (B, T, C) -> (B, C, T), since Conv1D convolves over the last axis.
        x = layers.transpose(input, [0, 2, 1])

        # FFN: conv -> ReLU -> conv.
        x = self.w_2(layers.relu(self.w_1(x)))

        # Dropout for regularization.
        x = layers.dropout(x, self.dropout)

        # (B, C, T) -> (B, T, C).
        x = layers.transpose(x, [0, 2, 1])
        # Residual connection.
        x = x + input

        # Layer normalization.
        output = self.layer_norm(x)

        return output
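

# A minimal usage sketch, not part of the original module: the hyperparameters
# and shapes below are illustrative assumptions, and running it requires
# parakeet's Conv1D to be importable as above. It shows that the block maps a
# (B, T, C) input to an output of the same shape.
if __name__ == "__main__":
    import numpy as np

    with dg.guard():
        ffn = PositionwiseFeedForward(d_in=256, num_hidden=1024, filter_size=1)
        x = dg.to_variable(np.random.randn(2, 50, 256).astype("float32"))
        y = ffn(x)
        print(y.shape)  # [2, 50, 256]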