# test_lstm_op.py
import unittest
import numpy as np
from op_test import OpTest

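# Clipping bounds used below: sigmoid inputs are clamped and tanh's exponent is
# capped so that np.exp never overflows in the reference implementation.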
SIGMOID_THRESHOLD_MIN = -40.0
SIGMOID_THRESHOLD_MAX = 13.0
EXP_MAX_INPUT = 40.0


def identity(x):
    return x


def sigmoid(x):
    y = np.copy(x)
    y[x < SIGMOID_THRESHOLD_MIN] = SIGMOID_THRESHOLD_MIN
    y[x > SIGMOID_THRESHOLD_MAX] = SIGMOID_THRESHOLD_MAX
    return 1. / (1. + np.exp(-y))


def tanh(x):
    y = -2. * x
    y[y > EXP_MAX_INPUT] = EXP_MAX_INPUT
    return (2. / (1. + np.exp(y))) - 1.


def relu(x):
    return np.maximum(x, 0)


ACTIVATION = {
    'identity': identity,
    'sigmoid': sigmoid,
    'tanh': tanh,
    'relu': relu
}
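
# A minimal, optional sanity sketch (not part of the original test): inside the
# clipping range the guarded sigmoid/tanh above match their plain NumPy
# counterparts; the thresholds only protect np.exp from overflow.
def _check_clipped_activations():
    x = np.linspace(-10., 10., 21)
    assert np.allclose(sigmoid(x), 1. / (1. + np.exp(-x)))
    assert np.allclose(tanh(x), np.tanh(x))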


def lstm(
        input,  # T x 4D
        lod,  # 1 x N
        h0=None,  # N x D
        c0=None,  # N x D
        w_h=None,  # D x 4D
        w_b=None,  # 1 x 4D
        w_c=None,  # 1 x 3D
        is_reverse=False,
        act_gate=None,
        act_cell=None,
        act_cand=None):
    def _step(x, w_h, w_c, h_pre, c_pre, act_gate, act_cell, act_cand):
        g = np.dot(h_pre, w_h)  # 1 x 4D
        g = g + x
        g = np.reshape(g, (1, g.size))
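        # Gate pre-activations are laid out as [candidate, input, forget,
        # output] along the last axis, matching the 4D weight/bias layout.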
        c, g_i, g_f, g_o = np.split(g, 4, axis=1)
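        # w_c, when given, packs the peephole weights [w_ic, w_fc, w_oc] that
        # couple the previous (or current, for the output gate) cell state.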
        if w_c is None:
            g_i = act_gate(g_i)  # 1 x D
            g_f = act_gate(g_f)  # 1 x D
        else:
            w_ic, w_fc, w_oc = np.split(w_c, 3, axis=1)
            g_i = act_gate(g_i + w_ic * c_pre)  # 1 x D
            g_f = act_gate(g_f + w_fc * c_pre)  # 1 x D
        c = g_f * c_pre + g_i * act_cand(c)  # 1 x D

        if w_c is None:
            g_o = act_gate(g_o)  # 1 x D
        else:
            _, _, w_oc = np.split(w_c, 3, axis=1)
            g_o = act_gate(g_o + w_oc * c)  # 1 x D
        h = g_o * act_cell(c)
        return h, c

    def _reverse(x, lod):
        y = np.zeros_like(x)
        for i in range(len(lod) - 1):
            b, e = lod[i], lod[i + 1]
            y[b:e, :] = np.flip(x[b:e, :], 0)
        return y

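    # lod[0] holds cumulative sequence offsets, e.g. [0, 2, 5, 7] describes
    # three sequences of lengths 2, 3 and 2 packed along the time axis.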
    offset = lod[0]
    batch_size = len(offset) - 1
    hidden = []
    cell = []
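    # For a reversed LSTM each sequence is flipped before the recurrence and
    # flipped back afterwards, so outputs stay in the original time order.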
    input = _reverse(input, offset) if is_reverse else input
    if w_b is not None:
        input = input + np.tile(w_b, (offset[-1], 1))
    for i in range(batch_size):
        # compute one sequence
        seq_len = offset[i + 1] - offset[i]
        x = input[offset[i]:offset[i + 1], :]
        h_pre = h0[i]  # 1 x D
        c_pre = c0[i]  # 1 x D
        for j in range(seq_len):
            # compute one step
            h_pre, c_pre = _step(x[j], w_h, w_c, h_pre, c_pre, act_gate,
                                 act_cell, act_cand)
            hidden.append(h_pre.flatten())
            cell.append(c_pre.flatten())

    hidden = np.array(hidden).astype('float64')
    cell = np.array(cell).astype('float64')

    hidden = _reverse(hidden, offset) if is_reverse else hidden
    cell = _reverse(cell, offset) if is_reverse else cell

    assert hidden.shape == (input.shape[0], input.shape[1] // 4)
    assert cell.shape == (input.shape[0], input.shape[1] // 4)
    return hidden, cell
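
# A minimal usage sketch of the NumPy reference above (hypothetical sizes, not
# used by the tests): two sequences of lengths 2 and 3, zero initial states,
# no bias and no peephole weights.
def _lstm_reference_demo():
    D = 4
    lod = [[0, 2, 5]]
    T, N = lod[0][-1], len(lod[0]) - 1
    x = np.random.normal(size=(T, 4 * D)).astype('float64')
    w = np.random.normal(size=(D, 4 * D)).astype('float64')
    h0 = np.zeros((N, D)).astype('float64')
    c0 = np.zeros((N, D)).astype('float64')
    h, c = lstm(x, lod, h0, c0, w, None, None, False, sigmoid, tanh, tanh)
    return h.shape, c.shape  # both (T, D)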


class TestLstmOp(OpTest):
    def set_argument(self):
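        # Three sequences with lengths 2, 3 and 2 (LoD offsets) and hidden
        # size D = 16.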
        self.lod = [[0, 2, 5, 7]]
        self.D = 16

        self.act_gate = 'sigmoid'
        self.act_cell = 'tanh'
        self.act_cand = 'tanh'

        self.has_initial_state = True
        self.has_bias = True
        self.is_reverse = False

    def setUp(self):
        self.set_argument()
        self.op_type = 'lstm'

        T = self.lod[0][-1]
        N = len(self.lod[0]) - 1

        x = np.random.normal(size=(T, 4 * self.D)).astype('float64')
        h0 = np.zeros((N, self.D)).astype('float64')
        c0 = np.zeros((N, self.D)).astype('float64')
        w = np.random.normal(size=(self.D, 4 * self.D)).astype('float64')
        b = np.random.normal(size=(1, 7 * self.D)).astype('float64')

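        # The 7D bias packs the 4D gate bias (w_b) followed by the 3D peephole
        # weights (w_c), matching the split below.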
        w_b = b[:, 0:4 * self.D] if self.has_bias else None
        w_c = b[:, 4 * self.D:] if self.has_bias else None
        h, c = lstm(x, self.lod, h0, c0, w, w_b, w_c, self.is_reverse,
                    ACTIVATION[self.act_gate], ACTIVATION[self.act_cell],
                    ACTIVATION[self.act_cand])

        self.inputs = {'Input': (x, self.lod), 'Weight': w}

        if self.has_bias:
            self.inputs['Bias'] = b

        if self.has_initial_state:
            self.inputs['H0'] = h0
            self.inputs['C0'] = c0

        self.outputs = {
            'Hidden': (h, self.lod),
            'Cell': (c, self.lod),
        }
        self.attrs = {
            'use_peepholes': True,
            'is_reverse': self.is_reverse,
            'gate_activation': self.act_gate,
            'cell_activation': self.act_cell,
            'candidate_activation': self.act_cand
        }

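    # Prefixed with `not_` so unittest discovery skips this check for now.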
    def not_test_check_output(self):
        self.check_output(atol=1e-8)

    # TODO(qingqing): add more unit test cases
    def not_test_check_grad(self):
        # TODO(qingqing): remove the following lines after check_grad is refined.
        N = len(self.lod[0]) - 1
        self.outputs['BatchGate'] = np.zeros((N, 4 * self.D)).astype('float64')
        self.outputs['BatchCellPreAct'] = np.zeros(
            (N, self.D)).astype('float64')
        self.check_grad(
            ['Input', 'Weight', 'Bias'], ['Hidden'], max_relative_error=5e-4)


class TestLstmOpHasNoInitial(TestLstmOp):
    def set_argument(self):
        self.lod = [[0, 2, 5, 7]]
        self.D = 16

        self.act_gate = 'sigmoid'
        self.act_cell = 'tanh'
        self.act_cand = 'tanh'

        self.has_initial_state = False
        self.is_reverse = True
        self.has_bias = True


class TestLstmOpHasNoBias(TestLstmOp):
    def set_argument(self):
        self.lod = [[0, 2, 5, 7]]
        self.D = 16

        self.act_gate = 'sigmoid'
        self.act_cell = 'tanh'
        self.act_cand = 'tanh'

        self.has_initial_state = True
        self.is_reverse = False
        self.has_bias = False

    def test_check_output(self):
        self.check_output(atol=1e-8)


class TestLstmOpReverse(TestLstmOp):
    def set_argument(self):
        self.lod = [[0, 2, 5, 7]]
        self.D = 16

        self.act_gate = 'sigmoid'
        self.act_cell = 'tanh'
        self.act_cand = 'tanh'

        self.has_initial_state = True
        self.is_reverse = True
        self.has_bias = True


if __name__ == '__main__':
    unittest.main()