from __future__ import print_function

import unittest

from paddle.v2.fluid.framework import Program, default_main_program, program_guard
import paddle.v2.fluid.layers as layers

# Shared handle to the process-wide default main program; the block
# creation/rollback assertions in TestProgram.test_program operate on it.
main_program = default_main_program()
class TestProgram(unittest.TestCase):
    """Unit tests for the fluid ``Program``/``Block`` bookkeeping APIs."""

    def test_program(self):
        """create_block/rollback maintain correct idx and parent_idx links."""
        # The default program starts at the root block: no parent, index 0.
        b = main_program.current_block()
        self.assertEqual(-1, b.parent_idx)
        self.assertEqual(0, b.idx)

        # Each create_block() pushes a child of the current block.
        b = main_program.create_block()
        self.assertEqual(1, b.idx)
        self.assertEqual(0, b.parent_idx)

        b = main_program.create_block()
        self.assertEqual(2, b.idx)
        self.assertEqual(1, b.parent_idx)

        # rollback() pops back to the parent block (block 1).
        main_program.rollback()

        b = main_program.current_block()
        self.assertEqual(1, b.idx)
        self.assertEqual(0, b.parent_idx)

        # A new block gets the next free index (3) even after a rollback,
        # and its parent is the current block (1).
        b = main_program.create_block()
        self.assertEqual(3, b.idx)
        self.assertEqual(1, b.parent_idx)

        main_program.rollback()
        b = main_program.current_block()
        self.assertEqual(1, b.idx)
        self.assertEqual(0, b.parent_idx)

    def test_program_clone(self):
        """Cloning a program reproduces its vars and ops (compared by str)."""
        prog = Program()

        x = prog.global_block().create_var(
            name='X', shape=[1000, 784], dtype='float32')

        y = prog.global_block().create_var(
            name='Y', shape=[784, 100], dtype='float32')
        out = prog.global_block().create_var(name='Out', dtype='float32')
        prog.global_block().append_op(
            type="mul", inputs={'X': [x],
                                'Y': [y]}, outputs={'Out': [out]})

        # FIXME(yuyang18): We manual compare the output string, since the order
        # of variable could be changed.
        print(prog)
        print(prog.clone())

    def test_parse_program_from_string(self):
        """A program serialized to protobuf bytes can be parsed back."""
        prog = Program()

        x = prog.global_block().create_var(
            name='X', shape=[1000, 784], dtype='float32')

        y = prog.global_block().create_var(
            name='Y', shape=[784, 100], dtype='float32')
        out = prog.global_block().create_var(name='Out', dtype='float32')
        prog.global_block().append_op(
            type="mul", inputs={'X': [x],
                                'Y': [y]}, outputs={'Out': [out]})

        # Round-trip through the serialized desc; compare by printed form.
        binary_str = prog.desc.serialize_to_string()
        prog_restored = Program.parse_from_string(binary_str)

        print(prog)
        print(prog_restored)

    def test_append_backward(self):
        """append_backward adds grad ops and maps params to grad var names."""
        prog = Program()
        block = prog.global_block()

        # Forward graph: mean(elementwise_add(mul(x, y), add_y)).
        mul_x = block.create_var(
            dtype="float32", shape=[5, 10], lod_level=0, name="mul.x")
        mul_y = block.create_var(
            dtype="float32", shape=[10, 8], lod_level=0, name="mul.y")
        mul_out = block.create_var(
            dtype="float32", shape=[5, 8], lod_level=0, name="mul.out")
        mul_op = block.append_op(
            type="mul",
            inputs={"X": [mul_x],
                    "Y": mul_y},
            outputs={"Out": [mul_out]},
            attrs={"x_num_col_dims": 1})

        add_y = block.create_var(
            dtype="float32", shape=[5, 8], lod_level=0, name="add.y")
        add_out = block.create_var(
            dtype="float32", shape=[5, 8], lod_level=0, name="add.out")
        add_op = block.append_op(
            type="elementwise_add",
            inputs={"X": mul_out,
                    "Y": add_y},
            outputs={"Out": add_out},
            attrs={"x_num_col_dims": 1})
        mean_out = block.create_var(
            dtype="float32", shape=[1], lod_level=0, name="mean.out")
        block.append_op(
            type="mean", inputs={"X": add_out}, outputs={"Out": mean_out})

        # Ops are indexed in append order within the block.
        self.assertEqual(mul_op.idx, 0)
        self.assertEqual(add_op.idx, 1)
        param_to_grad = prog.append_backward(mean_out, set())

        def grad_name(name):
            # Gradient variables follow the "<name>@GRAD" naming convention.
            return name + "@GRAD"

        # Every forward var gets a grad entry (grad name, block index 0).
        for var_name in ("mul.x", "mul.y", "mul.out", "add.y", "add.out",
                         "mean.out"):
            self.assertEqual(param_to_grad[var_name][0], grad_name(var_name))
            self.assertEqual(param_to_grad[var_name][1], 0)

        # Backward pass appends fill_constant (initial grad) followed by the
        # grad ops in reverse order of the forward ops.
        expect_ops = [
            "mul", "elementwise_add", "mean", "fill_constant", "mean_grad",
            "elementwise_add_grad", "mul_grad"
        ]
        actual_ops = []
        for op in block.ops:
            actual_ops.append(op.type)
        self.assertEqual(actual_ops, expect_ops)

    def test_program_clone_with_parameter(self):
        """Cloning preserves the parameters created by fc layers."""
        main_program = Program()
        startup_program = Program()
        with program_guard(main_program, startup_program):
            d = layers.data(name='x', shape=[784], dtype='float32')
            hidden = layers.fc(input=d, size=100)
            layers.fc(input=hidden, size=100)

        new_program = main_program.clone()
        self.assertNotEqual(0, len(new_program.blocks[0].all_parameters()))
# Run the test suite when executed as a script.
if __name__ == '__main__':
    unittest.main()