#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function
import unittest

from paddle.v2.fluid.framework import Program, default_main_program, program_guard, grad_var_name
import paddle.v2.fluid.layers as layers

main_program = default_main_program()


class TestProgram(unittest.TestCase):
    def test_program(self):
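        # The default main program starts with a single root block:
        # block index 0 with no parent (parent_idx == -1).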
        b = main_program.current_block()
        self.assertEqual(-1, b.parent_idx)
        self.assertEqual(0, b.idx)

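        # create_block() appends a new block whose parent is the current
        # block and makes the new block current.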
        b = main_program.create_block()
        self.assertEqual(1, b.idx)
        self.assertEqual(0, b.parent_idx)

        b = main_program.create_block()
        self.assertEqual(2, b.idx)
        self.assertEqual(1, b.parent_idx)

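        # rollback() switches the current block back to its parent block.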
        main_program.rollback()

        b = main_program.current_block()
        self.assertEqual(1, b.idx)
        self.assertEqual(0, b.parent_idx)

        b = main_program.create_block()
        self.assertEqual(3, b.idx)
        self.assertEqual(1, b.parent_idx)

        main_program.rollback()
        b = main_program.current_block()
        self.assertEqual(1, b.idx)
        self.assertEqual(0, b.parent_idx)

    def test_program_clone(self):
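        # Build a one-op (mul) program and clone it; the two programs are
        # compared by printing them (see the FIXME below).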
        prog = Program()

        x = prog.global_block().create_var(
            name='X', shape=[1000, 784], dtype='float32')

        y = prog.global_block().create_var(
            name='Y', shape=[784, 100], dtype='float32')
        out = prog.global_block().create_var(name='Out', dtype='float32')
        prog.global_block().append_op(
            type="mul",
            inputs={'X': [x], 'Y': [y]},
            outputs={'Out': [out]})

        # FIXME(yuyang18): We compare the output strings manually, since the
        # order of the variables could change.
        print(prog)
        print(prog.clone())

    def test_parse_program_from_string(self):
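        # Round-trip the program through its serialized ProgramDesc and check
        # the result by printing both programs.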
        prog = Program()

        x = prog.global_block().create_var(
            name='X', shape=[1000, 784], dtype='float32')

        y = prog.global_block().create_var(
            name='Y', shape=[784, 100], dtype='float32')
        out = prog.global_block().create_var(name='Out', dtype='float32')
        prog.global_block().append_op(
            type="mul",
            inputs={'X': [x], 'Y': [y]},
            outputs={'Out': [out]})

        binary_str = prog.desc.serialize_to_string()
        prog_restored = Program.parse_from_string(binary_str)

        print(prog)
        print(prog_restored)

    def test_append_backward(self):
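        # Build a small forward program by hand (mul -> elementwise_add ->
        # mean) and then append its backward pass.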
        prog = Program()
        block = prog.global_block()

        mul_x = block.create_var(
            dtype="float32", shape=[5, 10], lod_level=0, name="mul.x")
        mul_y = block.create_var(
            dtype="float32", shape=[10, 8], lod_level=0, name="mul.y")
        mul_out = block.create_var(
            dtype="float32", shape=[5, 8], lod_level=0, name="mul.out")
        mul_op = block.append_op(
            type="mul",
            inputs={"X": [mul_x],
                    "Y": mul_y},
            outputs={"Out": [mul_out]},
            attrs={"x_num_col_dims": 1})

        add_y = block.create_var(
            dtype="float32", shape=[5, 8], lod_level=0, name="add.y")
        add_out = block.create_var(
            dtype="float32", shape=[5, 8], lod_level=0, name="add.out")
        add_op = block.append_op(
            type="elementwise_add",
            inputs={"X": mul_out,
                    "Y": add_y},
            outputs={"Out": add_out},
            attrs={"x_num_col_dims": 1})
        mean_out = block.create_var(
            dtype="float32", shape=[1], lod_level=0, name="mean.out")
        block.append_op(
            type="mean", inputs={"X": add_out}, outputs={"Out": mean_out})

        self.assertEqual(mul_op.idx, 0)
        self.assertEqual(add_op.idx, 1)
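
        # append_backward should append a fill_constant op (seeding the loss
        # gradient) and the *_grad ops in reverse forward order, and return a
        # map from variable name to (gradient variable name, block index).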
        param_to_grad = prog.append_backward(mean_out, set())

        for var_name in ("mul.x", "mul.y", "mul.out", "add.y", "add.out",
                         "mean.out"):
            self.assertEqual(param_to_grad[var_name][0],
                             grad_var_name(var_name))
            self.assertEqual(param_to_grad[var_name][1], 0)

        expect_ops = [
            "mul", "elementwise_add", "mean", "fill_constant", "mean_grad",
            "elementwise_add_grad", "mul_grad"
        ]
        actual_ops = [op.type for op in block.ops]
        self.assertEqual(actual_ops, expect_ops)

    def test_program_clone_with_parameter(self):
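        # The fc layers create weight and bias parameters in main_program;
        # cloning the program must carry those parameters over.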
        main_program = Program()
        startup_program = Program()
        with program_guard(main_program, startup_program):
            d = layers.data(name='x', shape=[784], dtype='float32')
            hidden = layers.fc(input=d, size=100)
            layers.fc(input=hidden, size=100)

        new_program = main_program.clone()
        self.assertNotEqual(0, len(new_program.blocks[0].all_parameters()))


if __name__ == '__main__':
    unittest.main()