test_sequence_softmax_op.py
#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
import numpy as np
from op_test import OpTest
from test_softmax_op import stable_softmax
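# `stable_softmax` (imported above) computes softmax in the numerically
# stable way: logits are shifted by their maximum before exponentiation so
# exp() cannot overflow. A minimal sketch of the behavior this test relies
# on, assuming the imported helper follows the standard formulation:
#
#     def stable_softmax(x):
#         shifted = x - np.max(x)        # shift for numerical stability
#         exps = np.exp(shifted)
#         return exps / np.sum(exps)     # normalize to probabilities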


class TestSequenceSoftmaxOp(OpTest):
    def setUp(self):
        self.op_type = "sequence_softmax"
        x = np.random.uniform(0.1, 1, (11, 1)).astype("float32")
        # Offset-based LoD: boundaries [0, 4, 5, 8, 11] pack four sequences
        # of lengths 4, 1, 3 and 3 into the 11 rows of x.
        lod = [[0, 4, 5, 8, 11]]

        # Reference output: apply softmax independently to each sequence.
        out = np.zeros((11, 1)).astype("float32")
        for i in range(len(lod[0]) - 1):
            sub_x = x[lod[0][i]:lod[0][i + 1], :]
            sub_x = sub_x.reshape(1, lod[0][i + 1] - lod[0][i])
            sub_out = stable_softmax(sub_x)
            out[lod[0][i]:lod[0][i + 1], :] = sub_out.reshape(
                lod[0][i + 1] - lod[0][i], 1)

        self.inputs = {"X": (x, lod)}
        self.outputs = {"Out": out}
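
        # Illustrative sanity property of the reference output (not part of
        # the original test): softmax values within each sequence sum to 1.
        #   for i in range(len(lod[0]) - 1):
        #       assert np.isclose(out[lod[0][i]:lod[0][i + 1]].sum(), 1.0)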

    def test_check_output(self):
        self.check_output()

    def test_check_grad(self):
        # Gradient w.r.t. X is checked against a numeric (finite-difference)
        # gradient; tolerate up to 1% relative error.
        self.check_grad(["X"], "Out", max_relative_error=0.01)


if __name__ == "__main__":
    unittest.main()