test_operator_desc.py

#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import unittest

import paddle.fluid.core as core
import paddle.compat as cpt

from paddle.fluid.framework import Program, default_startup_program

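# test_error_type below appends a fresh block to the default startup program.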
main_program = default_startup_program()


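# Tests for operator (OpDesc) creation and introspection via Block.append_op.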
class TestOperator(unittest.TestCase):
    def test_error_type(self):
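        # Appending an op with no type, or with an unregistered type,
        # should raise ValueError.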
        block = main_program._create_block()
        try:
            block.append_op()
            self.fail()
        except ValueError as v_err:
            self.assertEqual(
                cpt.get_exception_message(v_err),
                "`type` to initialized an Operator can not be None.")
        try:
            block.append_op(type="no_such_op")
            self.fail()
        except ValueError as a_err:
            self.assertEqual(
                cpt.get_exception_message(a_err),
                "Operator \"no_such_op\" has not been registered.")

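    # Build a "mul" op by hand and check its type, inputs, outputs and attributes.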
    def test_op_desc_creation(self):
        program = Program()
        block = program.current_block()
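        # Create the input/output variables that the "mul" op will reference by name.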
        mul_x = block.create_var(
            dtype="float32", shape=[5, 10], lod_level=0, name="mul.x")
        mul_y = block.create_var(
            dtype="float32", shape=[10, 8], lod_level=0, name="mul.y")
        mul_out = block.create_var(
            dtype="float32", shape=[5, 8], lod_level=0, name="mul.out")
        mul_op = block.append_op(
            type="mul",
            inputs={"X": [mul_x],
                    "Y": mul_y},
            outputs={"Out": [mul_out]},
            attrs={"x_num_col_dims": 1})

        self.assertNotEqual(str(mul_op), "")
        self.assertEqual(mul_op.type, "mul")
        self.assertEqual(mul_op.input_names, ["X", "Y"])
        self.assertEqual(mul_op.input("X"), ["mul.x"])
        self.assertEqual(mul_op.input("Y"), ["mul.y"])
        self.assertEqual(mul_op.output_names, ["Out"])
        self.assertEqual(mul_op.output("Out"), ["mul.out"])
        self.assertEqual(
            set(mul_op.attr_names),
            set([
                "x_num_col_dims", "y_num_col_dims", "op_role", "op_role_var",
                "use_mkldnn", "scale_x", "scale_y", "scale_out",
                "force_fp32_output", "op_namescope", "op_callstack",
                "op_device", "with_quant_attr"
            ]))
        self.assertEqual(mul_op.has_attr("x_num_col_dims"), True)
        self.assertEqual(mul_op.attr_type("x_num_col_dims"), core.AttrType.INT)
        self.assertEqual(mul_op.attr("x_num_col_dims"), 1)
        self.assertEqual(mul_op.has_attr("y_num_col_dims"), True)
        self.assertEqual(mul_op.attr_type("y_num_col_dims"), core.AttrType.INT)
        self.assertEqual(mul_op.attr("y_num_col_dims"), 1)
        self.assertEqual(mul_op.idx, 0)
        self.assertEqual(mul_out.op, mul_op)
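        # Removing an input slot on the underlying OpDesc is reflected by the Python wrapper.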
        mul_op.desc.remove_input("X")
        self.assertEqual(mul_op.input_names, ["Y"])

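    # A single input slot ("X" of the "sum" op) can hold multiple variables.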
    def test_mult_input(self):
        program = Program()
        block = program.current_block()
        sum_x1 = block.create_var(
            dtype="int", shape=[3, 4], lod_level=0, name="sum.x1")
        sum_x2 = block.create_var(
            dtype="int", shape=[3, 4], lod_level=0, name="sum.x2")
        sum_x3 = block.create_var(
            dtype="int", shape=[3, 4], lod_level=0, name="sum.x3")
        sum_out = block.create_var(
            dtype="int", shape=[3, 4], lod_level=0, name="sum.out")
        sum_op = block.append_op(
            type="sum",
            inputs={"X": [sum_x1, sum_x2, sum_x3]},
            outputs={"Out": sum_out})
        self.assertEqual(sum_op.type, "sum")
        self.assertEqual(sum_op.input_names, ["X"])
        self.assertEqual(sum_op.input("X"), ["sum.x1", "sum.x2", "sum.x3"])
        self.assertEqual(sum_op.output_names, ["Out"])
        self.assertEqual(sum_op.output("Out"), ["sum.out"])
        self.assertEqual(sum_op.idx, 0)
        self.assertEqual(sum_out.op, sum_op)


if __name__ == '__main__':
    unittest.main()