#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function
import unittest

from paddle.fluid.framework import Program, default_main_program, program_guard, grad_var_name
import paddle.fluid.layers as layers
import paddle.fluid as fluid

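# Handle to the process-wide default main program; test_program below
# exercises its block stack in place.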
main_program = default_main_program()


class TestProgram(unittest.TestCase):
    def test_program(self):
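        # Exercise the block stack: _create_block() pushes a new child of
        # the current block and _rollback() pops back to its parent.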
        b = main_program.current_block()
        self.assertEqual(-1, b.parent_idx)
        self.assertEqual(0, b.idx)

        b = main_program._create_block()
        self.assertEqual(1, b.idx)
        self.assertEqual(0, b.parent_idx)

        b = main_program._create_block()
        self.assertEqual(2, b.idx)
        self.assertEqual(1, b.parent_idx)

        main_program._rollback()

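        # After _rollback(), block 1 is the current block again.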
        b = main_program.current_block()
        self.assertEqual(1, b.idx)
        self.assertEqual(0, b.parent_idx)

        b = main_program._create_block()
        self.assertEqual(3, b.idx)
        self.assertEqual(1, b.parent_idx)

        main_program._rollback()
        b = main_program.current_block()
        self.assertEqual(1, b.idx)
        self.assertEqual(0, b.parent_idx)

    def test_program_clone(self):
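        # Build a minimal program containing a single mul op, then clone it.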
        prog = Program()

        x = prog.global_block().create_var(
            name='X', shape=[1000, 784], dtype='float32')

        y = prog.global_block().create_var(
            name='Y', shape=[784, 100], dtype='float32')
        out = prog.global_block().create_var(name='Out', dtype='float32')
        prog.global_block().append_op(
            type="mul", inputs={'X': [x], 'Y': [y]}, outputs={'Out': [out]})

        # FIXME(yuyang18): We manually compare the output strings, since the
        # order of variables could change.
        print(prog)
        print(prog.clone())

    def test_parse_program_from_string(self):
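        # Round-trip the program through its serialized ProgramDesc string.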
        prog = Program()

        x = prog.global_block().create_var(
            name='X', shape=[1000, 784], dtype='float32')

        y = prog.global_block().create_var(
            name='Y', shape=[784, 100], dtype='float32')
        out = prog.global_block().create_var(name='Out', dtype='float32')
        prog.global_block().append_op(
            type="mul", inputs={'X': [x], 'Y': [y]}, outputs={'Out': [out]})

        binary_str = prog.desc.serialize_to_string()
        prog_restored = Program.parse_from_string(binary_str)

        print(prog)
        print(prog_restored)

    def test_program_clone_with_parameter(self):
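        # Parameters created inside program_guard must survive clone().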
        main_program = Program()
        startup_program = Program()
        with program_guard(main_program, startup_program):
            d = layers.data(name='x', shape=[784], dtype='float32')
            hidden = layers.fc(input=d, size=100)
            layers.fc(input=hidden, size=100)

        new_program = main_program.clone()
        self.assertNotEqual(0, len(new_program.blocks[0].all_parameters()))

    def test_program_inference_optimize(self):
        def net():
            reader = fluid.layers.py_reader(
                capacity=10,
                shapes=[[-1, 10], [-1, 1]],
                lod_levels=[0, 0],
                dtypes=['float32', 'int64'],
                use_double_buffer=True)
            in_data, label = fluid.layers.read_file(reader)
            predict_label = fluid.layers.fc(in_data, size=2, act='softmax')
            loss = fluid.layers.mean(
                fluid.layers.cross_entropy(
                    input=predict_label, label=label))

            optimizer = fluid.optimizer.Adam()
            optimizer.minimize(loss)

        startup_program = fluid.Program()
        main_program = fluid.Program()
        with fluid.program_guard(main_program, startup_program):
            net()
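        # By default _inference_optimize prunes the reader ops; keeping them
        # should leave exactly two extra ops at the front of the block.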
        no_read_program = main_program._inference_optimize()
        keep_read_program = main_program._inference_optimize(
            prune_read_op=False)
        no_read_ops = no_read_program.global_block().ops
        keep_read_ops = keep_read_program.global_block().ops
        self.assertEqual(len(keep_read_ops) - len(no_read_ops), 2)
        self.assertEqual(keep_read_ops[0].type, 'create_double_buffer_reader')
        self.assertEqual(keep_read_ops[1].type, 'read')

        for i in range(len(no_read_ops)):
            self.assertEqual(no_read_ops[i].type, keep_read_ops[i + 2].type)

    def test_program_all_parameters(self):
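        # all_parameters() should return every parameter in the program.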
        program = fluid.default_main_program()
        data = fluid.data(name='x', shape=[None, 13], dtype='float32')
        hidden = fluid.layers.fc(input=data, size=10)
        loss = fluid.layers.mean(hidden)
        fluid.optimizer.SGD(learning_rate=0.01).minimize(loss)

        # NOTE: here the parameters are fc_0.w_0 and fc_0.b_0
        param_list = program.all_parameters()
        self.assertEqual(len(param_list), 2)
        self.assertEqual(param_list[0].name, "fc_0.w_0")
        self.assertEqual(param_list[1].name, "fc_0.b_0")

    def test_prune_with_input_type_error(self):
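        # Feed variable names must be strings; ints should raise ValueError.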
        program = fluid.default_main_program()
        feed_var_names = [2, 3, 4]
        self.assertRaises(ValueError, program._prune_with_input,
                          feed_var_names, [])

    def test_random_seed_error(self):
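        # random_seed only accepts an integer.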
        program = fluid.default_main_program()
        with self.assertRaises(ValueError):
            program.random_seed = "seed"

    def test_copy_info_from_error(self):
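        # Both copy helpers expect a Program instance, not a string.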
        program = fluid.default_main_program()
        self.assertRaises(TypeError, program._copy_param_info_from, "program")
        self.assertRaises(TypeError, program._copy_dist_param_info_from,
                          "program")

    def test_remove_training_info(self):
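        # _remove_training_info should clear the is_parameter and
        # stop_gradient attributes from every variable desc.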
        def net():
            reader = fluid.layers.py_reader(
                capacity=10,
                shapes=[[-1, 10], [-1, 1]],
                lod_levels=[0, 0],
                dtypes=['float32', 'int64'],
                use_double_buffer=True)
            in_data, label = fluid.layers.read_file(reader)
            predict_label = fluid.layers.fc(in_data, size=2, act='softmax')
            loss = fluid.layers.mean(
                fluid.layers.cross_entropy(
                    input=predict_label, label=label))

            optimizer = fluid.optimizer.Adam()
            optimizer.minimize(loss)

        main_program = fluid.Program()
        with fluid.program_guard(main_program):
            net()

        removed_program = main_program._remove_training_info()

        for i in range(removed_program.num_blocks):
            block = removed_program.block(i)
            for var in block.desc.all_vars():
                self.assertFalse(var.has_is_parameter())
                self.assertFalse(var.has_stop_gradient())


if __name__ == '__main__':
    unittest.main()