# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
import numpy as np
from scipy.special import psi
import paddle
import paddle.fluid as fluid
import paddle.static as static
from op_test import OpTest
from paddle.fluid.framework import _test_eager_guard


class TestDigammaOp(OpTest):
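    # verifies the 'digamma' op's output and gradients against SciPy's psi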

    def setUp(self):
        # switch to static
        paddle.enable_static()

        self.op_type = 'digamma'
        self.python_api = paddle.digamma
        self.init_dtype_type()
        shape = (5, 32)
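        # shift samples into [1, 2) to avoid digamma's pole at x = 0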
        data = np.random.random(shape).astype(self.dtype) + 1
        self.inputs = {'X': data}
        # reference values from SciPy (psi is the digamma function)
        result = psi(data)
        self.outputs = {'Out': result}

    def init_dtype_type(self):
        self.dtype = np.float64

    def test_check_output(self):
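        # check_eager=True additionally exercises the eager-mode path (self.python_api)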
        self.check_output(check_eager=True)

    def test_check_grad_normal(self):
        self.check_grad(['X'], 'Out', check_eager=True)


class TestDigammaOpFp32(TestDigammaOp):
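    # float32 variant; its gradient check omits the eager-mode pass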

    def init_dtype_type(self):
        self.dtype = np.float32

    def test_check_grad_normal(self):
        self.check_grad(['X'], 'Out')


class TestDigammaAPI(unittest.TestCase):
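    # API-level tests: static graph, dynamic graph, eager mode, the `name`
    # argument, and dtype error handling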

    def setUp(self):
        # switch to static
        paddle.enable_static()
        # prepare test attrs
        self.dtypes = ["float32", "float64"]
        self.places = [paddle.CPUPlace()]
        if paddle.is_compiled_with_cuda():
            self.places.append(paddle.CUDAPlace(0))
        self._shape = [8, 3, 32, 32]

    def test_in_static_mode(self):

        def init_input_output(dtype):
            input = np.random.random(self._shape).astype(dtype)
            return {'x': input}, psi(input)

        for dtype in self.dtypes:
            input_dict, sc_res = init_input_output(dtype)
            for place in self.places:
                with static.program_guard(static.Program()):
                    x = static.data(name="x", shape=self._shape, dtype=dtype)
                    out = paddle.digamma(x)

                    exe = static.Executor(place)
                    out_value = exe.run(feed=input_dict, fetch_list=[out.name])
                    np.testing.assert_allclose(out_value[0], sc_res, rtol=1e-05)

    def test_in_dynamic_mode(self):
        for dtype in self.dtypes:
            input = np.random.random(self._shape).astype(dtype)
            sc_res = psi(input)
            for place in self.places:
                # `guard` is more convenient here than enable_dygraph/disable_dygraph
                with fluid.dygraph.guard(place):
                    input_t = paddle.to_tensor(input)
                    res = paddle.digamma(input_t).numpy()
                    np.testing.assert_allclose(res, sc_res, rtol=1e-05)

    def test_in_eager_dynamic_mode(self):
        with _test_eager_guard():
            self.test_in_dynamic_mode()

    def test_name_argument(self):
        with static.program_guard(static.Program()):
            x = static.data(name="x", shape=self._shape, dtype=self.dtypes[0])
            out = paddle.digamma(x, name="digamma_res")
            self.assertIn("digamma_res", out.name)

    def test_dtype_error(self):
        # in static mode
        with self.assertRaises(TypeError):
            with static.program_guard(static.Program()):
                x = static.data(name="x", shape=self._shape, dtype="int32")
                out = paddle.digamma(x, name="digamma_res")

        # in dynamic mode
        with self.assertRaises(RuntimeError):
            with fluid.dygraph.guard():
                input = np.random.random(self._shape).astype("int32")
                input_t = paddle.to_tensor(input)
                res = paddle.digamma(input_t)

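        # in eager dynamic mode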
        with self.assertRaises(RuntimeError):
            with fluid.dygraph.guard():
                with _test_eager_guard():
                    input = np.random.random(self._shape).astype("int32")
                    input_t = paddle.to_tensor(input)
                    res = paddle.digamma(input_t)


if __name__ == "__main__":
    unittest.main()