#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function
import unittest

from paddle.fluid.framework import Program, default_main_program, program_guard, grad_var_name
import paddle
import paddle.fluid.layers as layers
import paddle.fluid as fluid

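# Handle to the process-wide default main Program; test_program below
# inspects and mutates its block stack.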
main_program = default_main_program()


class TestProgram(unittest.TestCase):

    def test_program(self):
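        # A fresh default Program has a single root block:
        # idx 0 with no parent (parent_idx == -1).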
        b = main_program.current_block()
        self.assertEqual(-1, b.parent_idx)
        self.assertEqual(0, b.idx)

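        # _create_block() appends a new block whose parent is the current
        # block and makes the new block current.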
        b = main_program._create_block()
        self.assertEqual(1, b.idx)
        self.assertEqual(0, b.parent_idx)

        b = main_program._create_block()
        self.assertEqual(2, b.idx)
        self.assertEqual(1, b.parent_idx)

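        # _rollback() switches the current block back to its parent.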
        main_program._rollback()

        b = main_program.current_block()
        self.assertEqual(1, b.idx)
        self.assertEqual(0, b.parent_idx)

        b = main_program._create_block()
        self.assertEqual(3, b.idx)
        self.assertEqual(1, b.parent_idx)

        main_program._rollback()
        b = main_program.current_block()
        self.assertEqual(1, b.idx)
        self.assertEqual(0, b.parent_idx)

    def test_program_clone(self):
        prog = Program()

        x = prog.global_block().create_var(name='X',
                                           shape=[1000, 784],
                                           dtype='float32')

        y = prog.global_block().create_var(name='Y',
                                           shape=[784, 100],
                                           dtype='float32')
        out = prog.global_block().create_var(name='Out', dtype='float32')
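        # Out = X x Y: a [1000, 784] by [784, 100] matrix multiplication.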
        prog.global_block().append_op(type="mul",
                                      inputs={
                                          'X': [x],
                                          'Y': [y]
                                      },
                                      outputs={'Out': [out]})

        # FIXME(yuyang18): We manually compare the printed strings, since the
        # order of the variables could change.
        print(prog)
        print(prog.clone())

    def test_parse_program_from_string(self):
        prog = Program()

        x = prog.global_block().create_var(name='X',
                                           shape=[1000, 784],
                                           dtype='float32')

        y = prog.global_block().create_var(name='Y',
                                           shape=[784, 100],
                                           dtype='float32')
        out = prog.global_block().create_var(name='Out', dtype='float32')
        prog.global_block().append_op(type="mul",
                                      inputs={
                                          'X': [x],
                                          'Y': [y]
                                      },
                                      outputs={'Out': [out]})

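        # Round-trip the underlying ProgramDesc through its serialized
        # protobuf representation and rebuild a Program from the bytes.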
        binary_str = prog.desc.serialize_to_string()
        prog_restored = Program.parse_from_string(binary_str)

        print(prog)
        print(prog_restored)

    def test_program_clone_with_parameter(self):
        main_program = Program()
        startup_program = Program()
        with program_guard(main_program, startup_program):
            d = layers.data(name='x', shape=[784], dtype='float32')
            hidden = layers.fc(input=d, size=100)
            layers.fc(input=hidden, size=100)

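        # clone() should carry over the parameters created by the fc layers.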
        new_program = main_program.clone()
        self.assertNotEqual(0, len(new_program.blocks[0].all_parameters()))

    def test_program_inference_optimize(self):

        def net():
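            # A small softmax classifier fed through py_reader, so the
            # program contains reader ops (create_double_buffer_reader, read).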
            reader = fluid.layers.py_reader(capacity=10,
                                            shapes=[[-1, 10], [-1, 1]],
                                            lod_levels=[0, 0],
                                            dtypes=['float32', 'int64'],
                                            use_double_buffer=True)
            in_data, label = fluid.layers.read_file(reader)
            predict_label = fluid.layers.fc(in_data, size=2, act='softmax')
            loss = paddle.mean(
                fluid.layers.cross_entropy(input=predict_label, label=label))

            optimizer = fluid.optimizer.Adam()
            optimizer.minimize(loss)

        startup_program = fluid.Program()
        main_program = fluid.Program()
        with fluid.program_guard(main_program, startup_program):
            net()
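
        # _inference_optimize prunes the reader ops by default; with
        # prune_read_op=False the create_double_buffer_reader and read ops
        # are kept in the result.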
        no_read_program = main_program._inference_optimize()
        keep_read_program = main_program._inference_optimize(
            prune_read_op=False)
        no_read_ops = no_read_program.global_block().ops
        keep_read_ops = keep_read_program.global_block().ops
        self.assertEqual(len(keep_read_ops) - len(no_read_ops), 2)
        self.assertEqual(keep_read_ops[0].type, 'create_double_buffer_reader')
        self.assertEqual(keep_read_ops[1].type, 'read')

        for i in range(len(no_read_ops)):
            self.assertEqual(no_read_ops[i].type, keep_read_ops[i + 2].type)

    def test_program_all_parameters(self):
        program = fluid.default_main_program()
        data = fluid.data(name='x', shape=[None, 13], dtype='float32')
        hidden = fluid.layers.fc(input=data, size=10)
        loss = paddle.mean(hidden)
        fluid.optimizer.SGD(learning_rate=0.01).minimize(loss)

        # NOTE: here the parameters are fc_0.w_0 and fc_0.b_0
        param_list = program.all_parameters()
        self.assertEqual(len(param_list), 2)
        self.assertEqual(param_list[0].name, "fc_0.w_0")
        self.assertEqual(param_list[1].name, "fc_0.b_0")

    def test_prune_with_input_type_error(self):
        program = fluid.default_main_program()
        feed_var_names = [2, 3, 4]
        self.assertRaises(ValueError, program._prune_with_input, feed_var_names,
                          [])

    def test_random_seed_error(self):
        program = fluid.default_main_program()
        with self.assertRaises(ValueError):
            program.random_seed = "seed"

    def test_copy_info_from_error(self):
        program = fluid.default_main_program()
        self.assertRaises(TypeError, program._copy_param_info_from, "program")
        self.assertRaises(TypeError, program._copy_dist_param_info_from,
                          "program")

    def test_remove_training_info(self):

        def net():
            reader = fluid.layers.py_reader(capacity=10,
                                            shapes=[[-1, 10], [-1, 1]],
                                            lod_levels=[0, 0],
                                            dtypes=['float32', 'int64'],
                                            use_double_buffer=True)
            in_data, label = fluid.layers.read_file(reader)
            predict_label = fluid.layers.fc(in_data, size=2, act='softmax')
            loss = paddle.mean(
                fluid.layers.cross_entropy(input=predict_label, label=label))

            optimizer = fluid.optimizer.Adam()
            optimizer.minimize(loss)

        main_program = fluid.Program()
        with fluid.program_guard(main_program):
            net()

        removed_program = main_program._remove_training_info()

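        # Training-only attributes (is_parameter, stop_gradient) should be
        # stripped from every variable in every block.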
        for i in range(removed_program.num_blocks):
            block = removed_program.block(i)
            for var in block.desc.all_vars():
                self.assertFalse(var.has_is_parameter())
                self.assertFalse(var.has_stop_gradient())


if __name__ == '__main__':
    unittest.main()