#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
import numpy as np
import six
from op_test import OpTest
import paddle.fluid.core as core
from paddle.fluid.op import Operator


class TestSplitIdsOp(OpTest):
    def setUp(self):
        self.op_type = "split_ids"
        ids = np.array([[0], [2], [2], [3], [5], [5], [6]]).astype('int64')
        out0 = np.array([[0], [3], [6]]).astype('int64')
        out1 = np.array([[]]).astype('int64')
        out2 = np.array([[2], [2], [5], [5]]).astype('int64')
        self.inputs = {'Ids': ids}
        self.outputs = {'Out': [('out0', out0), ('out1', out1), ('out2', out2)]}

    def test_check_output(self):
        self.check_output()


class TestSplitIds(unittest.TestCase):
    def get_places(self):
        places = [core.CPUPlace()]
        return places

    def test_check_output(self):
        for place in self.get_places():
            self.check_with_place(place)

    def check_with_place(self, place):
        scope = core.Scope()
        rows = [0, 5, 7, 4, 9]
        height = 20
        row_numel = 2

        # initialize input variable X
        x = scope.var('X').get_selected_rows()
        x.set_rows(rows)
        x.set_height(height)
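        # fill row i of the dense tensor with [rows[i], rows[i] + 1] so the
        # values can be checked against the row ids in the assertions below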
        np_array = np.ones((len(rows), row_numel)).astype("float32")
        for i in range(len(rows)):
            for j in range(row_numel):
                np_array[i, j] = rows[i] + j
        x_tensor = x.get_tensor()
        x_tensor.set(np_array, place)

        outs_name = ["out%d" % i for i in six.moves.xrange(3)]
        outs = [
            scope.var(var_name).get_selected_rows() for var_name in outs_name
        ]

        # expected output selected rows: row id r is routed to out[r % 3]
        expected_out_rows = [[0, 9], [7, 4], [5]]

        op = Operator("split_ids", Ids="X", Out=outs_name)

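        # run the op several times; the outputs should be identical on every
        # run, so the same assertions must hold on each iteration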
        for _ in range(3):
            op.run(scope, place)

            for i in range(len(outs)):
                expected_rows = expected_out_rows[i]
                self.assertEqual(outs[i].rows(), expected_rows)
                for j in range(len(expected_rows)):
                    row = expected_rows[j]
                    self.assertAlmostEqual(
                        float(row), np.array(outs[i].get_tensor())[j, 0])
                    self.assertAlmostEqual(
                        float(row + 1), np.array(outs[i].get_tensor())[j, 1])


if __name__ == '__main__':
    unittest.main()