test_imperative.py
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
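
# Basic smoke tests for the experimental imperative (dynamic-graph) mode in
# paddle.fluid.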

import unittest
import sys
import numpy as np

import paddle.fluid as fluid
from paddle.fluid import core


class MyLayer(fluid.imperative.PyLayer):
    """A minimal imperative layer: ReLU followed by an element-wise square.

    The ReLU output is kept on the instance so tests can inspect its gradient
    after backward.
    """

    def __init__(self):
        super(MyLayer, self).__init__()

    def forward(self, inputs):
        x = fluid.layers.relu(inputs[0])
        self._x_for_debug = x
        return [fluid.layers.elementwise_mul(x, x)]


class TestImperative(unittest.TestCase):
    def test_layer(self):
        with fluid.imperative.guard():
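            # Smoke test: constructing the C++ Layer and the Python PyLayer
            # and calling forward on an empty input list should not raise.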
            cl = core.Layer()
            cl.forward([])
            l = fluid.imperative.PyLayer()
            l.forward([])

    def test_layer_in_out(self):
        with fluid.imperative.guard():
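            # Run the custom layer on a numpy input, check that it produces an
            # output, then backprop and inspect the gradient of the stored
            # intermediate activation.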
            l = MyLayer()
            x = l(np.array([1.0, 2.0, -1.0], dtype=np.float32))[0]
            self.assertIsNotNone(x)
            sys.stderr.write("%s output: %s\n" % (x, x._numpy()))
            x._backward()
            sys.stderr.write("grad %s\n" % l._x_for_debug._gradient())


if __name__ == '__main__':
    unittest.main()