from __future__ import print_function
import unittest

from paddle.v2.fluid.framework import Program
from paddle.v2.fluid.framework import g_main_program
import paddle.v2.fluid.layers as layers


class TestProgram(unittest.TestCase):
    """Tests for ``Program``: block-stack management (create_block /
    rollback), cloning, protobuf round-tripping, backward-pass
    construction, and parameter cloning."""

    def test_program(self):
        # The global program starts with a single root block: idx 0,
        # no parent (parent_idx == -1).
        prog = g_main_program
        root = prog.current_block()
        self.assertEqual(-1, root.parent_idx)
        self.assertEqual(0, root.idx)

        # create_block() pushes a new block whose parent is the block
        # that was current at the time of the call.
        first_child = prog.create_block()
        self.assertEqual(1, first_child.idx)
        self.assertEqual(0, first_child.parent_idx)

        second_child = prog.create_block()
        self.assertEqual(2, second_child.idx)
        self.assertEqual(1, second_child.parent_idx)

        # rollback() makes the parent block current again.
        prog.rollback()

        current = prog.current_block()
        self.assertEqual(1, current.idx)
        self.assertEqual(0, current.parent_idx)

        # A block created after a rollback still gets a fresh index (3)
        # but is parented to the rolled-back current block (1).
        third_child = prog.create_block()
        self.assertEqual(3, third_child.idx)
        self.assertEqual(1, third_child.parent_idx)

        prog.rollback()
        current = prog.current_block()
        self.assertEqual(1, current.idx)
        self.assertEqual(0, current.parent_idx)

    def _make_mul_program(self):
        # Helper shared by the clone/serialization tests: builds a Program
        # whose global block holds a single ``mul`` op with Out = X * Y.
        prog = Program()
        block = prog.global_block()
        x = block.create_var(name='X', shape=[1000, 784], dtype='float32')
        y = block.create_var(name='Y', shape=[784, 100], dtype='float32')
        out = block.create_var(name='Out', dtype='float32')
        block.append_op(
            type="mul", inputs={'X': [x],
                                'Y': [y]}, outputs={'Out': [out]})
        return prog

    def test_program_clone(self):
        prog = self._make_mul_program()
        # FIXME(yuyang18): We manual compare the output string, since the order
        # of variable could be changed.
        print(prog)
        print(prog.clone())

    def test_parse_program_from_string(self):
        prog = self._make_mul_program()
        # Round-trip the program through its serialized protobuf form and
        # print both for manual comparison (see FIXME in test_program_clone).
        restored = Program.parse_from_string(prog.desc.serialize_to_string())
        print(prog)
        print(restored)

    def test_append_backward(self):
        prog = Program()
        block = prog.global_block()

        # Forward graph: mean.out = mean(elementwise_add(mul(x, y), add.y))
        mul_x = block.create_var(
            dtype="float32", shape=[5, 10], lod_level=0, name="mul.x")
        mul_y = block.create_var(
            dtype="float32", shape=[10, 8], lod_level=0, name="mul.y")
        mul_out = block.create_var(
            dtype="float32", shape=[5, 8], lod_level=0, name="mul.out")
        mul_op = block.append_op(
            type="mul",
            inputs={"X": [mul_x],
                    "Y": mul_y},
            outputs={"Out": [mul_out]},
            attrs={"x_num_col_dims": 1})

        add_y = block.create_var(
            dtype="float32", shape=[5, 8], lod_level=0, name="add.y")
        add_out = block.create_var(
            dtype="float32", shape=[5, 8], lod_level=0, name="add.out")
        add_op = block.append_op(
            type="elementwise_add",
            inputs={"X": mul_out,
                    "Y": add_y},
            outputs={"Out": add_out},
            attrs={"x_num_col_dims": 1})
        mean_out = block.create_var(
            dtype="float32", shape=[1], lod_level=0, name="mean.out")
        block.append_op(
            type="mean", inputs={"X": add_out}, outputs={"Out": mean_out})

        # Ops receive consecutive indices in append order.
        self.assertEqual(mul_op.idx, 0)
        self.assertEqual(add_op.idx, 1)

        param_to_grad = prog.append_backward(mean_out, set())

        # Every forward variable maps to ("<name>@GRAD", 0): a gradient
        # variable named with the @GRAD suffix, placed in block 0.
        for var_name in ("mul.x", "mul.y", "mul.out", "add.y", "add.out",
                         "mean.out"):
            grad_info = param_to_grad[var_name]
            self.assertEqual(grad_info[0], var_name + "@GRAD")
            self.assertEqual(grad_info[1], 0)

        # Expected op sequence: the three forward ops, then a
        # fill_constant op, then the grad ops in reverse forward order.
        expect_ops = [
            "mul", "elementwise_add", "mean", "fill_constant", "mean_grad",
            "elementwise_add_grad", "mul_grad"
        ]
        self.assertEqual([op.type for op in block.ops], expect_ops)

    def test_program_clone_with_parameter(self):
        main_program = Program()
        startup_program = Program()
        programs = {
            'main_program': main_program,
            'startup_program': startup_program
        }
        d = layers.data(name='x', shape=[784], dtype='float32', **programs)
        hidden = layers.fc(input=d, size=100, **programs)
        layers.fc(input=hidden, size=100, **programs)

        # Cloning must carry the fc parameters over to the new program.
        cloned = main_program.clone()
        self.assertNotEqual(0, len(cloned.blocks[0].all_parameters()))


# Standard unittest entry point: run all TestProgram cases.
if __name__ == '__main__':
    unittest.main()