test_gru_unit_op.py 4.6 KB
Newer Older
1
#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
D
dzhwinter 已提交
2
#
D
dzhwinter 已提交
3 4 5
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
D
dzhwinter 已提交
6
#
D
dzhwinter 已提交
7
#     http://www.apache.org/licenses/LICENSE-2.0
D
dzhwinter 已提交
8
#
D
dzhwinter 已提交
9 10 11 12 13 14
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

15 16
from __future__ import print_function

G
guosheng 已提交
17 18 19
import math
import unittest
import numpy as np
20
from op_test import OpTest
G
guosheng 已提交
21 22


23 24 25 26 27 28 29 30 31 32 33 34
class GRUActivationType(object):
    """Integer codes for the activations accepted by the gru_unit op.

    This is a plain constants namespace. It deliberately does NOT inherit
    OpTest (a TestCase subclass): inheriting it caused unittest discovery
    to pick this class up as an empty test case.
    """
    identity = 0
    sigmoid = 1
    tanh = 2
    relu = 3


def identity(x):
    """Identity activation: return *x* unchanged."""
    return x


def sigmoid(x):
    """Element-wise logistic sigmoid, 1 / (1 + exp(-x))."""
    exp_neg = np.exp(-x)
    return 1. / (1. + exp_neg)


38 39 40 41 42 43
def tanh(x):
    """Tanh built from the sigmoid identity: tanh(x) = 2*sigmoid(2x) - 1."""
    squashed = sigmoid(2. * x)
    return 2. * squashed - 1.


def relu(x):
    """Element-wise rectified linear unit: max(x, 0)."""
    return np.maximum(0, x)
G
guosheng 已提交
44 45 46


class TestGRUUnitOp(OpTest):
    """Checks the gru_unit operator's forward output and gradients against
    a NumPy reference implementation of a single GRU step."""

    batch_size = 5
    frame_size = 10
    # Map op-attribute activation codes to their reference callables.
    activate = {
        GRUActivationType.identity: identity,
        GRUActivationType.sigmoid: sigmoid,
        GRUActivationType.tanh: tanh,
        GRUActivationType.relu: relu,
    }

    def set_inputs(self, origin_mode=False):
        """Fill self.inputs/self.attrs with random operator inputs."""
        batch_size = self.batch_size
        frame_size = self.frame_size
        self.op_type = 'gru_unit'
        # Input carries the three gate pre-activations, hence frame_size * 3.
        self.inputs = {
            'Input': np.random.uniform(
                -0.1, 0.1, (batch_size, frame_size * 3)).astype('float64'),
            'HiddenPrev': np.random.uniform(
                -0.1, 0.1, (batch_size, frame_size)).astype('float64'),
            'Weight': np.random.uniform(
                -1. / math.sqrt(frame_size), 1. / math.sqrt(frame_size),
                (frame_size, frame_size * 3)).astype('float64'),
        }
        self.attrs = {
            'activation': GRUActivationType.tanh,
            'gate_activation': GRUActivationType.sigmoid,
            'origin_mode': origin_mode
        }

    def set_outputs(self, origin_mode=False):
        """Compute the reference GRU-step outputs with NumPy."""
        batch_size = self.batch_size
        frame_size = self.frame_size
        inp = self.inputs['Input']
        prev = self.inputs['HiddenPrev']
        weight = self.inputs['Weight']
        # Bias is optional; fall back to zeros when it is not provided.
        bias = self.inputs['Bias'] if 'Bias' in self.inputs else np.zeros(
            (1, frame_size * 3))
        gates = inp + np.tile(bias, (batch_size, 1))
        # First two thirds of the weight matrix feed the update/reset gates.
        w_gates = weight.flatten()[:frame_size * frame_size * 2].reshape(
            (frame_size, frame_size * 2))
        u_r = self.activate[self.attrs['gate_activation']](np.dot(
            prev, w_gates) + gates[:, :frame_size * 2])
        u = u_r[:, :frame_size]
        r = u_r[:, frame_size:frame_size * 2]
        reset_prev = r * prev
        # Remaining third of the weight matrix feeds the candidate state.
        w_cand = weight.flatten()[frame_size * frame_size * 2:].reshape(
            (frame_size, frame_size))
        c = self.activate[self.attrs['activation']](np.dot(reset_prev, w_cand) +
                                                    gates[:, frame_size * 2:])
        gates = np.hstack((u_r, c))
        # origin_mode flips which term the update gate weights.
        if origin_mode:
            h = (1 - u) * c + u * prev
        else:
            h = u * c + (1 - u) * prev
        self.outputs = {
            'Gate': gates.astype('float64'),
            'ResetHiddenPrev': reset_prev.astype('float64'),
            'Hidden': h.astype('float64')
        }

    def setUp(self):
        self.set_inputs()
        self.set_outputs()

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        self.check_grad(['Input', 'HiddenPrev', 'Weight'], ['Hidden'])
G
guosheng 已提交
116 117


Q
Qiao Longfei 已提交
118 119
class TestGRUUnitOpOriginMode(TestGRUUnitOp):
    """Same checks as TestGRUUnitOp but with origin_mode=True,
    i.e. the reference uses h = (1 - u) * c + u * h_prev."""

    def setUp(self):
        self.set_inputs(origin_mode=True)
        self.set_outputs(origin_mode=True)


G
guosheng 已提交
124
class TestGRUUnitOpWithBias(TestGRUUnitOp):
    """Variant of TestGRUUnitOp that also feeds an explicit Bias input and
    uses the identity activation for the candidate state."""

    def set_inputs(self, origin_mode=False):
        """Add a random Bias on top of the base-class inputs."""
        frame_size = self.frame_size
        super(TestGRUUnitOpWithBias, self).set_inputs()
        self.inputs['Bias'] = np.random.uniform(
            -0.1, 0.1, (1, frame_size * 3)).astype('float64')
        self.attrs = {
            'activation': GRUActivationType.identity,
            'gate_activation': GRUActivationType.sigmoid,
            'origin_mode': origin_mode
        }

    def test_check_grad(self):
        self.check_grad(['Input', 'HiddenPrev', 'Weight', 'Bias'], ['Hidden'])

    def test_check_grad_ingore_input(self):
        # Fix: set('Input') iterates the string and yields the character set
        # {'I', 'n', 'p', 'u', 't'}, so 'Input' was never actually excluded
        # from the gradient check. Wrap the name in a list instead.
        self.check_grad(
            ['HiddenPrev', 'Weight', 'Bias'], ['Hidden'],
            no_grad_set=set(['Input']))
G
guosheng 已提交
144 145


Q
Qiao Longfei 已提交
146 147
class TestGRUUnitOpWithBiasOriginMode(TestGRUUnitOpWithBias):
    """Bias-carrying variant run with origin_mode=True."""

    def setUp(self):
        self.set_inputs(origin_mode=True)
        self.set_outputs(origin_mode=True)


G
guosheng 已提交
152 153
# Run every test case in this module when executed as a script.
if __name__ == '__main__':
    unittest.main()