#   Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
import numpy as np
from op_test import OpTest

import paddle.fluid.core as core
from paddle import rand
import paddle.fluid as fluid
from paddle.fluid import compiler, Program, program_guard
import paddle


class TestRandOpError(unittest.TestCase):
    """
    This class tests the input type checks of the rand op.
    """

    def test_errors(self):
        main_prog = Program()
        start_prog = Program()
        with program_guard(main_prog, start_prog):

            def test_Variable():
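                # The shape argument must be a Variable, list, or tuple;
                # passing a LoDTensor should raise TypeError.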
                x1 = fluid.create_lod_tensor(np.zeros((4, 784)), [[1, 1, 1, 1]],
                                             fluid.CPUPlace())
                rand(x1)

            self.assertRaises(TypeError, test_Variable)

            def test_dtype():
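                # rand only supports floating point outputs;
                # requesting an int32 dtype should raise TypeError.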
                dim_1 = fluid.layers.fill_constant([1], "int64", 3)
                dim_2 = fluid.layers.fill_constant([1], "int32", 5)
                rand(shape=[dim_1, dim_2], dtype='int32')

            self.assertRaises(TypeError, test_dtype)


class TestRandOp(unittest.TestCase):
    """
    This class tests the common usages of the rand op in static graph mode.
    """

    def run_net(self, use_cuda=False):
        place = fluid.CUDAPlace(0) if use_cuda else fluid.CPUPlace()
        exe = fluid.Executor(place)

        train_program = fluid.Program()
        startup_program = fluid.Program()
        with fluid.program_guard(train_program, startup_program):
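            # Case 1: shape given as a Python list, with the default dtype
            # (float32) and with an explicit float64 dtype.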
            result_0 = rand([3, 4])
            result_1 = rand([3, 4], 'float64')

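            # Case 2: shape given as a list of 1-D shape Tensors with mixed
            # integer dtypes.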
            dim_1 = fluid.layers.fill_constant([1], "int64", 3)
            dim_2 = fluid.layers.fill_constant([1], "int32", 5)
            result_2 = rand(shape=[dim_1, dim_2])

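            # Case 3: shape given as a 1-D int64 Tensor fed at run time.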
            var_shape = fluid.data(name='var_shape', shape=[2], dtype="int64")
            result_3 = rand(var_shape)

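            # Case 4: shape given as a 1-D int32 Tensor fed at run time.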
            var_shape_int32 = fluid.data(name='var_shape_int32',
                                         shape=[2],
                                         dtype="int32")
            result_4 = rand(var_shape_int32)
        exe.run(startup_program)

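        # Concrete values for the shape placeholders, fed when running the graph.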
        x1 = np.array([3, 2]).astype('int64')
        x2 = np.array([4, 3]).astype('int32')
        ret = exe.run(
            train_program,
            feed={
                "var_shape": x1,
                "var_shape_int32": x2
            },
            fetch_list=[result_0, result_1, result_2, result_3, result_4])

    def test_run(self):
        self.run_net(False)
        if core.is_compiled_with_cuda():
            self.run_net(True)


class TestRandOpForDygraph(unittest.TestCase):
    """
    This class tests the common usages of the rand op in dygraph mode.
    """

    def run_net(self, use_cuda=False):
        place = fluid.CUDAPlace(0) if use_cuda else fluid.CPUPlace()
        with fluid.dygraph.guard(place):
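            # Fixed list shape, with the default dtype (float32) and an
            # explicit float64 dtype.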
            rand([3, 4])

            rand([3, 4], 'float64')

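            # Shape given as a list of 1-D shape Tensors.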
            dim_1 = fluid.layers.fill_constant([1], "int64", 3)
            dim_2 = fluid.layers.fill_constant([1], "int32", 5)
            rand(shape=[dim_1, dim_2])

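            # Shape given as a 1-D Tensor created from a numpy array.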
            var_shape = fluid.dygraph.to_variable(np.array([3, 4]))
            rand(var_shape)

    def test_run(self):
        self.run_net(False)
        if core.is_compiled_with_cuda():
            self.run_net(True)


class TestRandDtype(unittest.TestCase):

    def test_default_dtype(self):
        paddle.disable_static()

        def test_default_fp16():
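            # float16 is not a supported dtype for rand; expect TypeError.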
            paddle.framework.set_default_dtype('float16')
            paddle.tensor.random.rand([2, 3])

        self.assertRaises(TypeError, test_default_fp16)
        def test_default_fp32():
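            # With float32 as the global default dtype, rand should return an
            # FP32 Tensor.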
            paddle.framework.set_default_dtype('float32')
            out = paddle.tensor.random.rand([2, 3])
            self.assertEqual(out.dtype, fluid.core.VarDesc.VarType.FP32)

        def test_default_fp64():
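            # With float64 as the global default dtype, rand should return an
            # FP64 Tensor.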
            paddle.framework.set_default_dtype('float64')
            out = paddle.tensor.random.rand([2, 3])
            self.assertEqual(out.dtype, fluid.core.VarDesc.VarType.FP64)

        test_default_fp64()
        test_default_fp32()

        paddle.enable_static()


if __name__ == "__main__":
    unittest.main()