#   Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
import paddle.fluid.core as core
from paddle.fluid.executor import Executor
import paddle.fluid.layers as layers
from paddle.fluid.backward import append_backward
from paddle.fluid.framework import switch_main_program
from paddle.fluid.framework import Program
import numpy as np


class TestPrintOpCPU(unittest.TestCase):
    def setUp(self):
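        # Feed data: a 2x3 float32 LoDTensor holding two length-1 sequences.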
        self.place = core.CPUPlace()
        self.x_tensor = core.LoDTensor()
        tensor_np = np.random.random(size=(2, 3)).astype('float32')
        self.x_tensor.set(tensor_np, self.place)
        # LoD offsets must end at the number of rows in the tensor (2 here),
        # giving two sequences of length 1.
        self.x_tensor.set_lod([[0, 1, 2]])

    def build_network(self, only_forward, **kwargs):
        # Build a small program that feeds ``x`` through a Print op; when
        # only_forward is False, also append the backward pass so the op is
        # exercised during gradient computation.
        x = layers.data('x', shape=[3], dtype='float32', lod_level=1)
        x.stop_gradient = False
        printed = layers.Print(input=x, **kwargs)
        if only_forward:
            return printed
        loss = layers.mean(x=printed)
        append_backward(loss=loss)
        return loss

    def test_forward(self):
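        # Run the forward-only program; with print_phase='forward' the Print
        # op emits the tensor during the forward pass.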
        switch_main_program(Program())
        printed = self.build_network(True, print_phase='forward')
        exe = Executor(self.place)
        outs = exe.run(feed={'x': self.x_tensor},
                       fetch_list=[printed],
                       return_numpy=False)

    def test_backward(self):
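        # Build the program with a backward pass and run it, exercising the
        # Print op during backpropagation (print_phase='backward').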
        switch_main_program(Program())
        loss = self.build_network(False, print_phase='backward')
        exe = Executor(self.place)
        outs = exe.run(feed={'x': self.x_tensor},
                       fetch_list=[loss],
                       return_numpy=False)


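# Skip the GPU variant on CPU-only builds. This guard is an addition to the
# original test and assumes the Fluid build exposes core.is_compiled_with_cuda().
@unittest.skipIf(not core.is_compiled_with_cuda(),
                 "core is not compiled with CUDA")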
class TestPrintOpGPU(TestPrintOpCPU):
    def setUp(self):
        self.place = core.CUDAPlace(0)
        self.x_tensor = core.LoDTensor()
        tensor_np = np.random.random(size=(2, 3)).astype('float32')
        self.x_tensor.set(tensor_np, self.place)
        # Same LoD fix as the CPU case: offsets must cover both rows.
        self.x_tensor.set_lod([[0, 1, 2]])


if __name__ == '__main__':
    unittest.main()