# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import unittest
import paddle.fluid as fluid
from simple_nets import init_data


def case1_fill_grad_vars():
    """Build a net where one split output is never used.

    ``feature`` is split into ``part1``/``part2`` but only ``part1``
    feeds the loss, so backward must fill a zero gradient for the
    unused ``part2`` output.

    Returns:
        The mean cross-entropy loss variable.
    """
    x = fluid.layers.data(name='image', shape=[784], dtype='float32')
    label = fluid.layers.data(name='label', shape=[1], dtype='int64')
    feature = fluid.layers.fc(input=x, size=20, act=None)
    part1, part2 = fluid.layers.split(feature, num_or_sections=[10, 10], dim=1)
    # Note that: part2 is not used.
    loss = fluid.layers.cross_entropy(input=part1, label=label)
    loss = fluid.layers.mean(loss)
    return loss


def case2_prune_no_grad_branch():
    """Build a net whose label passes through non-differentiable casts.

    The float->int64 round trip on ``label`` produces ops with no
    gradient path, so backward must prune that branch instead of
    propagating gradients through it.

    Returns:
        The mean cross-entropy loss variable.
    """
    x = fluid.layers.data(name='image', shape=[784], dtype='float32')
    label = fluid.layers.data(name='label', shape=[1], dtype='int64')
    feature = fluid.layers.fc(input=x, size=10, act=None)
    label = fluid.layers.cast(label, dtype="float32")
    label = fluid.layers.cast(label, dtype='int64')
    # Note that the label is not persistable in fluid.layers.cross_entropy.
    loss = fluid.layers.cross_entropy(input=feature, label=label)
    loss = fluid.layers.mean(loss)
    return loss


def case3_prune_no_grad_branch2():
    """Build a net whose entire input branch has no gradient.

    The loss depends only on ``one_hot`` of an int64 label that went
    through non-differentiable casts, so backward must prune the whole
    branch — there is nothing to compute a gradient for.

    Returns:
        The mean of the one-hot output variable.
    """
    label = fluid.layers.data(name='label', shape=[1], dtype='int64')
    label = fluid.layers.cast(label, dtype="float32")
    label = fluid.layers.cast(label, dtype='int64')
    out = fluid.layers.one_hot(input=label, depth=100)
    loss = fluid.layers.mean(out)
    return loss


def case4_with_no_grad_op_maker():
    """Build a loss on top of an op that registers no grad-op maker.

    ``gaussian_random`` takes no differentiable inputs, so backward
    must handle an op for which no gradient op exists at all.

    Returns:
        The mean of the random tensor variable.
    """
    random_out = fluid.layers.gaussian_random(shape=[20, 30])
    return fluid.layers.mean(random_out)


class TestBackward(unittest.TestCase):
    """Backward pass must handle unused outputs, pruned branches, and
    ops without grad-op makers without crashing."""

    def check_backward(self, model, feed_dict):
        """Build ``model`` in fresh programs, minimize its loss, run one step.

        Args:
            model: Zero-argument callable that builds a network and
                returns a scalar loss variable.
            feed_dict: Feed mapping for the data layers ``model`` declares.
        """
        place = fluid.CPUPlace()
        exe = fluid.Executor(place)

        main = fluid.Program()
        startup = fluid.Program()

        with fluid.program_guard(main, startup):
            loss = model()

            optimizer = fluid.optimizer.SGD(learning_rate=0.1)
            optimizer.minimize(loss)

            # Run the locally created programs explicitly rather than
            # relying on program_guard having swapped the global defaults;
            # behavior is identical inside the guard but this stays correct
            # if the code is ever moved outside it.
            exe.run(startup)
            exe.run(main, feed=feed_dict)

    def test_backward(self):
        batch_size = 2
        img, label = init_data(batch_size, img_shape=[784], label_range=9)
        feed_dict = {'image': img, 'label': label}
        self.check_backward(case1_fill_grad_vars, feed_dict)
        self.check_backward(case2_prune_no_grad_branch, feed_dict)
        self.check_backward(case3_prune_no_grad_branch2, {'label': label})
        self.check_backward(case4_with_no_grad_op_maker, {})


if __name__ == '__main__':
    unittest.main()