test_rnn_nets.py
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import paddle
paddle.set_default_dtype("float64")
from paddle.fluid.layers import sequence_mask

import numpy as np
import unittest

from convert import convert_params_for_net
from rnn_numpy import SimpleRNN, LSTM, GRU
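# `convert_params_for_net` and the numpy `SimpleRNN`/`LSTM`/`GRU` above are
# local test helpers: the numpy nets serve as reference implementations, and
# the converter makes a reference net and a paddle.nn net share the same
# parameter values.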


class TestSimpleRNN(unittest.TestCase):
    def __init__(self, time_major=True, direction="forward", place="cpu"):
        super(TestSimpleRNN, self).__init__("runTest")
        self.time_major = time_major
        self.direction = direction
        self.num_directions = 2 if direction == "bidirectional" else 1
        self.place = place

    def setUp(self):
        # Since `set_device` is global, call it in `setUp` rather than in
        # `__init__` to avoid inheriting a wrong device set by another test case.
        place = paddle.set_device(self.place)
        paddle.disable_static(place)
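        # Build a numpy reference net and its paddle.nn counterpart with the
        # same hyperparameters, then sync their parameters so the outputs can
        # be compared numerically.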
        rnn1 = SimpleRNN(
            16, 32, 2, time_major=self.time_major, direction=self.direction)
        rnn2 = paddle.nn.SimpleRNN(
            16, 32, 2, time_major=self.time_major, direction=self.direction)
        convert_params_for_net(rnn1, rnn2)

        self.rnn1 = rnn1
        self.rnn2 = rnn2

    def test_with_initial_state(self):
        rnn1 = self.rnn1
        rnn2 = self.rnn2

        x = np.random.randn(12, 4, 16)
        if not self.time_major:
            x = np.transpose(x, [1, 0, 2])
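        # Initial state shape: (num_layers * num_directions, batch_size, hidden_size).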
        prev_h = np.random.randn(2 * self.num_directions, 4, 32)

        y1, h1 = rnn1(x, prev_h)
        y2, h2 = rnn2(paddle.to_tensor(x), paddle.to_tensor(prev_h))
        np.testing.assert_allclose(y1, y2.numpy(), atol=1e-8, rtol=1e-5)
        np.testing.assert_allclose(h1, h2.numpy(), atol=1e-8, rtol=1e-5)

    def test_with_zero_state(self):
        rnn1 = self.rnn1
        rnn2 = self.rnn2

        x = np.random.randn(12, 4, 16)
        if not self.time_major:
            x = np.transpose(x, [1, 0, 2])

        y1, h1 = rnn1(x)
        y2, h2 = rnn2(paddle.to_tensor(x))
        np.testing.assert_allclose(y1, y2.numpy(), atol=1e-8, rtol=1e-5)
        np.testing.assert_allclose(h1, h2.numpy(), atol=1e-8, rtol=1e-5)

    def test_with_input_lengths(self):
        rnn1 = self.rnn1
        rnn2 = self.rnn2

        x = np.random.randn(12, 4, 16)
        if not self.time_major:
            x = np.transpose(x, [1, 0, 2])
        sequence_length = np.array([12, 10, 9, 8], dtype=np.int64)

        y1, h1 = rnn1(x, sequence_length=sequence_length)

        seq_len = paddle.to_tensor(sequence_length)
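        # Zero out the padded time steps of the paddle output so it can be
        # compared against the reference output computed with `sequence_length`.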
        mask = sequence_mask(seq_len, dtype=paddle.get_default_dtype())
        if self.time_major:
            mask = paddle.transpose(mask, [1, 0])
        y2, h2 = rnn2(paddle.to_tensor(x), sequence_length=seq_len)
        y2 = paddle.multiply(y2, mask, axis=0)

        np.testing.assert_allclose(y1, y2.numpy(), atol=1e-8, rtol=1e-5)
        np.testing.assert_allclose(h1, h2.numpy(), atol=1e-8, rtol=1e-5)

    def test_predict(self):
        predict_test_util(self.place, "SimpleRNN")

    def runTest(self):
        self.test_with_initial_state()
        self.test_with_zero_state()
        self.test_with_input_lengths()
        self.test_predict()


class TestGRU(unittest.TestCase):
    def __init__(self, time_major=True, direction="forward", place="cpu"):
        super(TestGRU, self).__init__("runTest")
        self.time_major = time_major
        self.direction = direction
        self.num_directions = 2 if direction == "bidirectional" else 1
        self.place = place

    def setUp(self):
        # Since `set_device` is global, call it in `setUp` rather than in
        # `__init__` to avoid inheriting a wrong device set by another test case.
        place = paddle.set_device(self.place)
        paddle.disable_static(place)
        rnn1 = GRU(16,
                   32,
                   2,
                   time_major=self.time_major,
                   direction=self.direction)
        rnn2 = paddle.nn.GRU(16,
                             32,
                             2,
                             time_major=self.time_major,
                             direction=self.direction)
        convert_params_for_net(rnn1, rnn2)

        self.rnn1 = rnn1
        self.rnn2 = rnn2

    def test_with_initial_state(self):
        rnn1 = self.rnn1
        rnn2 = self.rnn2

        x = np.random.randn(12, 4, 16)
        if not self.time_major:
            x = np.transpose(x, [1, 0, 2])
        prev_h = np.random.randn(2 * self.num_directions, 4, 32)

        y1, h1 = rnn1(x, prev_h)
        y2, h2 = rnn2(paddle.to_tensor(x), paddle.to_tensor(prev_h))
        np.testing.assert_allclose(y1, y2.numpy(), atol=1e-8, rtol=1e-5)
        np.testing.assert_allclose(h1, h2.numpy(), atol=1e-8, rtol=1e-5)

    def test_with_zero_state(self):
        rnn1 = self.rnn1
        rnn2 = self.rnn2

        x = np.random.randn(12, 4, 16)
        if not self.time_major:
            x = np.transpose(x, [1, 0, 2])

        y1, h1 = rnn1(x)
        y2, h2 = rnn2(paddle.to_tensor(x))
        np.testing.assert_allclose(y1, y2.numpy(), atol=1e-8, rtol=1e-5)
        np.testing.assert_allclose(h1, h2.numpy(), atol=1e-8, rtol=1e-5)

    def test_with_input_lengths(self):
        rnn1 = self.rnn1
        rnn2 = self.rnn2

        x = np.random.randn(12, 4, 16)
        if not self.time_major:
            x = np.transpose(x, [1, 0, 2])
        sequence_length = np.array([12, 10, 9, 8], dtype=np.int64)

        y1, h1 = rnn1(x, sequence_length=sequence_length)

        seq_len = paddle.to_tensor(sequence_length)
        mask = sequence_mask(seq_len, dtype=paddle.get_default_dtype())
        if self.time_major:
            mask = paddle.transpose(mask, [1, 0])
        y2, h2 = rnn2(paddle.to_tensor(x), sequence_length=seq_len)
        y2 = paddle.multiply(y2, mask, axis=0)

        np.testing.assert_allclose(y1, y2.numpy(), atol=1e-8, rtol=1e-5)
        np.testing.assert_allclose(h1, h2.numpy(), atol=1e-8, rtol=1e-5)

    def test_predict(self):
        predict_test_util(self.place, "GRU")

    def runTest(self):
        self.test_with_initial_state()
        self.test_with_zero_state()
        self.test_with_input_lengths()
        self.test_predict()


class TestLSTM(unittest.TestCase):
    def __init__(self, time_major=True, direction="forward", place="cpu"):
        super(TestLSTM, self).__init__("runTest")
        self.time_major = time_major
        self.direction = direction
        self.num_directions = 2 if direction == "bidirectional" else 1
        self.place = place

    def setUp(self):
        # Since `set_device` is global, call it in `setUp` rather than in
        # `__init__` to avoid inheriting a wrong device set by another test case.
        place = paddle.set_device(self.place)
        paddle.disable_static(place)
        rnn1 = LSTM(
            16, 32, 2, time_major=self.time_major, direction=self.direction)
        rnn2 = paddle.nn.LSTM(
            16, 32, 2, time_major=self.time_major, direction=self.direction)
        convert_params_for_net(rnn1, rnn2)

        self.rnn1 = rnn1
        self.rnn2 = rnn2

    def test_with_initial_state(self):
        rnn1 = self.rnn1
        rnn2 = self.rnn2

        x = np.random.randn(12, 4, 16)
        if not self.time_major:
            x = np.transpose(x, [1, 0, 2])
        prev_h = np.random.randn(2 * self.num_directions, 4, 32)
        prev_c = np.random.randn(2 * self.num_directions, 4, 32)

        y1, (h1, c1) = rnn1(x, (prev_h, prev_c))
        y2, (h2, c2) = rnn2(
            paddle.to_tensor(x),
            (paddle.to_tensor(prev_h), paddle.to_tensor(prev_c)))
        np.testing.assert_allclose(y1, y2.numpy(), atol=1e-8, rtol=1e-5)
        np.testing.assert_allclose(h1, h2.numpy(), atol=1e-8, rtol=1e-5)
        np.testing.assert_allclose(c1, c2.numpy(), atol=1e-8, rtol=1e-5)

    def test_with_zero_state(self):
        rnn1 = self.rnn1
        rnn2 = self.rnn2

        x = np.random.randn(12, 4, 16)
        if not self.time_major:
            x = np.transpose(x, [1, 0, 2])

        y1, (h1, c1) = rnn1(x)
        y2, (h2, c2) = rnn2(paddle.to_tensor(x))
        np.testing.assert_allclose(y1, y2.numpy(), atol=1e-8, rtol=1e-5)
        np.testing.assert_allclose(h1, h2.numpy(), atol=1e-8, rtol=1e-5)
        np.testing.assert_allclose(c1, c2.numpy(), atol=1e-8, rtol=1e-5)

    def test_with_input_lengths(self):
        rnn1 = self.rnn1
        rnn2 = self.rnn2

        x = np.random.randn(12, 4, 16)
        if not self.time_major:
            x = np.transpose(x, [1, 0, 2])
        sequence_length = np.array([12, 10, 9, 8], dtype=np.int64)

        y1, (h1, c1) = rnn1(x, sequence_length=sequence_length)

        seq_len = paddle.to_tensor(sequence_length)
        mask = sequence_mask(seq_len, dtype=paddle.get_default_dtype())
        if self.time_major:
            mask = paddle.transpose(mask, [1, 0])
        y2, (h2, c2) = rnn2(paddle.to_tensor(x), sequence_length=seq_len)
        y2 = paddle.multiply(y2, mask, axis=0)

        np.testing.assert_allclose(y1, y2.numpy(), atol=1e-8, rtol=1e-5)
        np.testing.assert_allclose(h1, h2.numpy(), atol=1e-8, rtol=1e-5)
        np.testing.assert_allclose(c1, c2.numpy(), atol=1e-8, rtol=1e-5)

    def test_predict(self):
        predict_test_util(self.place, "LSTM")

    def runTest(self):
        self.test_with_initial_state()
        self.test_with_zero_state()
        self.test_with_input_lengths()
        self.test_predict()


def predict_test_util(place, mode):
    place = paddle.set_device(place)
    paddle.seed(123)
    np.random.seed(123)

    class Net(paddle.nn.Layer):
        def __init__(self):
            super(Net, self).__init__()
            self.rnn = getattr(paddle.nn, mode)(16,
                                                32,
                                                2,
                                                direction="bidirectional",
                                                dropout=0.1)

        def forward(self, input):
            return self.rnn(input)

    x = paddle.randn((4, 10, 16))
    x.stop_gradient = False
    seq_len = paddle.to_tensor(np.array([10, 6, 8, 5]))
    mask = sequence_mask(seq_len, maxlen=10, dtype=x.dtype)
    mask = paddle.unsqueeze(mask, [2])
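    # Run one training step, masking the padded time steps out of the loss.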
    rnn = Net()
    y, _ = rnn(x)
    y = y * mask
    loss = paddle.mean(y)
    loss.backward()
    optimizer = paddle.optimizer.Adam(
        learning_rate=0.1, parameters=rnn.parameters())
    optimizer.step()
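    # The eval-mode output is the reference that the exported inference
    # program must reproduce below.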
    rnn.eval()
    y, _ = rnn(x)
    # `jit.to_static` also traces a train_program; exporting in eval mode can
    # currently cause errors, e.g. the dropout grad op getting `is_test == True`.
    rnn.train()

    rnn = paddle.jit.to_static(
        rnn, [paddle.static.InputSpec(
            shape=[None, None, 16], dtype=x.dtype)])
    paddle.jit.save(rnn, "./inference/%s_infer" % mode)

    paddle.enable_static()
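    # Reload the saved inference model in static-graph mode and check that it
    # reproduces the eval-mode output.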

    new_scope = paddle.static.Scope()
    with paddle.static.scope_guard(new_scope):
        exe = paddle.static.Executor(place)
        [inference_program, feed_target_names,
         fetch_targets] = paddle.static.load_inference_model(
             "./inference/%s_infer" % mode, exe)
        results = exe.run(inference_program,
                          feed={feed_target_names[0]: x.numpy()},
                          fetch_list=fetch_targets)
        np.testing.assert_equal(
            y.numpy(), results[0])  # eval results equal predict results
    paddle.disable_static()


def load_tests(loader, tests, pattern):
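    # Build the suite by hand so every test class runs for each combination of
    # direction, time_major, and device.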
    suite = unittest.TestSuite()
    devices = ["cpu", "gpu"] if paddle.fluid.is_compiled_with_cuda() \
        else ["cpu"]
    for direction in ["forward", "backward", "bidirectional"]:
        for time_major in [True, False]:
            for device in devices:
                for test_class in [TestSimpleRNN, TestLSTM, TestGRU]:
                    suite.addTest(test_class(time_major, direction, device))
    return suite

if __name__ == '__main__':
    unittest.main()