#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import unittest
import numpy as np
import math
import paddle.fluid.core as core
import paddle
import paddle.fluid as fluid
import paddle.fluid.layers as layers
import random
import sys

from op_test import OpTest

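# The pure-NumPy reference RNN implementations live next to this test in
# ./rnn; make them importable.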
sys.path.append("./rnn")
from rnn_numpy import SimpleRNN, LSTM, GRU
from convert import get_params_for_net

random.seed(2)
np.set_printoptions(threshold=np.inf)
paddle.enable_static()


class TestRNNOp(OpTest):
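    # Checks the fused "rnn" operator (LSTM mode by default) against the
    # pure-NumPy reference implementation imported from ./rnn.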

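    # WeightList tensors are named "{layer}.weight_{k}" and
    # "{layer}.bias_{k}", two of each per direction per layer
    # (input-hidden and hidden-hidden).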
    def get_weight_names(self):
        weight_names = []
        for i in range(self.num_layers):
            for j in range(0, 2 * self.direction_num):
                weight_names.append("{}.weight_{}".format(i, j))
        for i in range(self.num_layers):
            for j in range(0, 2 * self.direction_num):
                weight_names.append("{}.bias_{}".format(i, j))
        return weight_names

    def setUp(self):
        self.op_type = "rnn"
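        # On ROCm, fall back to float32 and to dense input without
        # per-sample sequence lengths.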
        self.dtype = np.float32 if core.is_compiled_with_rocm() else np.float64
        self.sequence_length = None if core.is_compiled_with_rocm(
        ) else np.array([12, 11, 10, 9, 8], dtype=np.int32)
        self.num_layers = 1
        self.is_bidirec = False
        self.mode = "LSTM"
        self.is_test = False
        self.dropout = 0.0
        self.set_attrs()

        self.direction_num = 2 if self.is_bidirec else 1
        direction = "bidirectional" if self.is_bidirec else "forward"
        seq_length = 12
        batch_size = 5
        input_size = 3
        hidden_size = 2

        input = np.random.uniform(low=-0.1,
                                  high=0.1,
                                  size=(seq_length, batch_size,
                                        input_size)).astype(self.dtype)
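        # Zero out the padding implied by sequence_length ([12, 11, 10, 9,
        # 8]): batch item b carries valid data only for its first
        # sequence_length[b] time steps.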
        if self.sequence_length is not None:
            input[11][1:][:] = 0
            input[10][2:][:] = 0
            input[9][3:][:] = 0
            input[8][4:][:] = 0

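        # Reference run: the NumPy LSTM from rnn_numpy produces the
        # expected outputs, and get_params_for_net flattens its parameters
        # into the order expected by WeightList.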
        rnn1 = LSTM(input_size,
                    hidden_size,
                    num_layers=self.num_layers,
                    time_major=True,
                    direction=direction,
                    dropout=self.dropout,
                    dtype=self.dtype)

        flat_w = get_params_for_net(rnn1)
        output, (last_hidden,
                 last_cell) = rnn1(input, sequence_length=self.sequence_length)

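        # On ROCm, restrict the OpTest to the GPU place.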
        if core.is_compiled_with_rocm():

            def rocm_rnn_get_place():
                places = [core.CUDAPlace(0)]
                return places

            self._get_places = rocm_rnn_get_place

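        # Zero initial hidden and cell states, one slot per layer per
        # direction; the uint8 buffers are opaque workspaces whose sizes
        # here are placeholders.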
        init_h = np.zeros((self.num_layers * self.direction_num, batch_size,
                           hidden_size)).astype(self.dtype)
        init_c = np.zeros((self.num_layers * self.direction_num, batch_size,
                           hidden_size)).astype(self.dtype)
        state_out = np.ndarray((300)).astype("uint8")

        self.inputs = {
            'Input': input,
            'WeightList': flat_w,
            'PreState': [('init_h', init_h), ('init_c', init_c)],
            'SequenceLength': self.sequence_length
        }
        if self.sequence_length is None:
            self.inputs = {
                'Input': input,
                'WeightList': flat_w,
                'PreState': [('init_h', init_h), ('init_c', init_c)],
            }
        self.attrs = {
            'dropout_prob': self.dropout,
            'is_bidirec': self.is_bidirec,
            'input_size': input_size,
            'hidden_size': hidden_size,
            'num_layers': self.num_layers,
            'mode': self.mode,
            'is_test': self.is_test
        }
        self.outputs = {
            'Out': output,
            "State": [('last_hidden', last_hidden), ('last_cell', last_cell)],
            'Reserve': np.ndarray((400)).astype("uint8"),
            'DropoutState': state_out
        }

    def test_output(self):
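        # Reserve and DropoutState hold opaque workspace bytes, so they
        # are excluded from the comparison.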
        self.check_output(no_check_set=['Reserve', 'DropoutState'])

    def set_attrs(self):
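        # Overridden by the TestRNNOp* subclasses below to vary the
        # configuration.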
        pass

    def test_grad(self):
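        # Gradient check w.r.t. the input, both initial states, and every
        # weight and bias tensor; guarded out for inference-mode variants.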
        if not self.is_test:
            var_name_list = self.get_weight_names()
            grad_check_list = ['Input', 'init_h', 'init_c']
            grad_check_list.extend(var_name_list)
            self.check_grad(set(grad_check_list),
                            ['Out', 'last_hidden', 'last_cell'])


class TestRNNOp1(TestRNNOp):

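    # Dense input: no per-sample sequence lengths.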
    def set_attrs(self):
        self.sequence_length = None


class TestRNNOp2(TestRNNOp):

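    # Bidirectional, dense input.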
    def set_attrs(self):
        self.sequence_length = None
        self.is_bidirec = True


class TestRNNOp3(TestRNNOp):

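    # Inference mode; the gradient check is skipped.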
    def set_attrs(self):
        self.is_test = True
        self.sequence_length = None


class TestRNNOp4(TestRNNOp):

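    # Bidirectional inference mode, dense input.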
    def set_attrs(self):
        self.is_test = True
        self.sequence_length = None
        self.is_bidirec = True


class TestRNNOp5(TestRNNOp):

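    # Two stacked layers.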
    def set_attrs(self):
        self.num_layers = 2


class TestRNNOp6(TestRNNOp):

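    # Two stacked bidirectional layers.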
    def set_attrs(self):
        self.num_layers = 2
        self.is_bidirec = True


class TestRNNOp7(TestRNNOp):

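    # Two stacked bidirectional layers, inference mode.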
    def set_attrs(self):
        self.num_layers = 2
        self.is_bidirec = True
        self.is_test = True


class TestRNNOp8(TestRNNOp):

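    # Two stacked bidirectional layers, dense input.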
    def set_attrs(self):
        self.num_layers = 2
        self.is_bidirec = True
        self.sequence_length = None


class TestRNNOp9(TestRNNOp):

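    # Three stacked layers.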
    def set_attrs(self):
        self.num_layers = 3


if __name__ == '__main__':
    unittest.main()