diff --git a/python/paddle/fluid/layers/tensor.py b/python/paddle/fluid/layers/tensor.py
index 0f098c370792605aecd83d4aa70bb2675aa0c481..6dca5dd2bd4a4d6860edd13db8895350646726e4 100644
--- a/python/paddle/fluid/layers/tensor.py
+++ b/python/paddle/fluid/layers/tensor.py
@@ -13,9 +13,11 @@
 # limitations under the License.
 
 from __future__ import print_function
+import six
 from six.moves import reduce
 from ..layer_helper import LayerHelper
 from ..param_attr import ParamAttr
+from ..initializer import Initializer
 from ..framework import convert_np_dtype_to_dtype_, in_dygraph_mode, _varbase_creator
 from ..framework import Variable
 from ..initializer import Constant
@@ -101,10 +103,30 @@ def create_parameter(shape,
         import paddle.fluid.layers as layers
         W = layers.create_parameter(shape=[784, 200], dtype='float32')
     """
+    check_type(shape, 'shape', (list, tuple, numpy.ndarray), 'create_parameter')
+    for item in shape:
+        if six.PY2:
+            check_type(item, 'item of shape',
+                       (int, long, numpy.uint8, numpy.int8, numpy.int16,
+                        numpy.int32, numpy.int64), 'create_parameter')
+        else:
+            check_type(item, 'item of shape',
+                       (int, numpy.uint8, numpy.int8, numpy.int16, numpy.int32,
+                        numpy.int64), 'create_parameter')
+
+    check_dtype(dtype, 'dtype', [
+        'bool', 'float16', 'float32', 'float64', 'int8', 'int16', 'int32',
+        'int64', 'uint8'
+    ], 'create_parameter')
+    check_type(attr, 'attr', (type(None), ParamAttr), 'create_parameter')
+    check_type(default_initializer, 'default_initializer',
+               (type(None), Initializer), 'create_parameter')
+
     helper = LayerHelper("create_parameter", **locals())
     if attr is None:
         attr = ParamAttr(name=name)
-    return helper.create_parameter(attr, shape, dtype, is_bias,
+    return helper.create_parameter(attr, shape,
+                                   convert_dtype(dtype), is_bias,
                                    default_initializer)
 
 
@@ -140,6 +162,23 @@
         var = layers.create_global_var(shape=[2,3], value=1.0, dtype='float32',
                                        persistable=True, force_cpu=True, name='new_var')
     """
+    check_type(shape, 'shape', (list, tuple, numpy.ndarray),
+               'create_global_var')
+    for item in shape:
+        if six.PY2:
+            check_type(item, 'item of shape',
+                       (int, long, numpy.uint8, numpy.int8, numpy.int16,
+                        numpy.int32, numpy.int64), 'create_global_var')
+        else:
+            check_type(item, 'item of shape',
+                       (int, numpy.uint8, numpy.int8, numpy.int16, numpy.int32,
+                        numpy.int64), 'create_global_var')
+
+    check_dtype(dtype, 'dtype', [
+        'bool', 'float16', 'float32', 'float64', 'int8', 'int16', 'int32',
+        'int64', 'uint8'
+    ], 'create_global_var')
+
     helper = LayerHelper("global_var", **locals())
     var = helper.create_global_variable(
         dtype=dtype,
diff --git a/python/paddle/fluid/tests/unittests/test_create_global_var.py b/python/paddle/fluid/tests/unittests/test_create_global_var.py
new file mode 100644
index 0000000000000000000000000000000000000000..140d476967747571d61a5fc9f3ed1a88cddbdd95
--- /dev/null
+++ b/python/paddle/fluid/tests/unittests/test_create_global_var.py
@@ -0,0 +1,44 @@
+# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+import numpy as np
+import paddle.fluid as fluid
+from paddle.fluid import Program, program_guard
+
+
+class TestCreateGlobalVarError(unittest.TestCase):
+    def test_errors(self):
+        with program_guard(Program(), Program()):
+
+            def test_shape():
+                fluid.layers.create_global_var(1, 2.0, np.float32)
+
+            self.assertRaises(TypeError, test_shape)
+
+            def test_shape_item():
+                fluid.layers.create_global_var([1.0, 2.0, 3.0], 2.0, 'float32')
+
+            self.assertRaises(TypeError, test_shape_item)
+
+            # create_global_var supports every dtype known to convert_dtype(),
+            # so an unsupported dtype raises ValueError, not TypeError.
+            def test_dtype():
+                fluid.layers.create_global_var([1, 2, 3], 2.0, np.complex128)
+
+            self.assertRaises(ValueError, test_dtype)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/python/paddle/fluid/tests/unittests/test_create_parameter.py b/python/paddle/fluid/tests/unittests/test_create_parameter.py
new file mode 100644
index 0000000000000000000000000000000000000000..763fb64816c9c66055b3ead2886e4ba29e0406f7
--- /dev/null
+++ b/python/paddle/fluid/tests/unittests/test_create_parameter.py
@@ -0,0 +1,51 @@
+# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+import numpy as np
+import paddle.fluid as fluid
+from paddle.fluid import Program, program_guard
+
+
+class TestCreateParameterError(unittest.TestCase):
+    def test_errors(self):
+        with program_guard(Program(), Program()):
+
+            def test_shape():
+                fluid.layers.create_parameter(1, np.float32)
+
+            self.assertRaises(TypeError, test_shape)
+
+            def test_shape_item():
+                fluid.layers.create_parameter([1.0, 2.0, 3.0], "float32")
+
+            self.assertRaises(TypeError, test_shape_item)
+
+            def test_attr():
+                fluid.layers.create_parameter(
+                    [1, 2, 3], np.float32, attr=np.array([i for i in range(6)]))
+
+            self.assertRaises(TypeError, test_attr)
+
+            def test_default_initializer():
+                fluid.layers.create_parameter(
+                    [1, 2, 3],
+                    np.float32,
+                    default_initializer=np.array([i for i in range(6)]))
+
+            self.assertRaises(TypeError, test_default_initializer)
+
+
+if __name__ == '__main__':
+    unittest.main()