未验证 提交 10daf977 编写于 作者: L liu zhengxi 提交者: GitHub

API (create_parameter, create_global_var) error message enhancement (#23623) (#23947)

* error message enhancement create_parameter create_global_var, test=develop

* use six.PY2 to use long, test=develop

* fix input dtype for create_parameter, test=develop
上级 18056f7d
...@@ -13,9 +13,11 @@ ...@@ -13,9 +13,11 @@
# limitations under the License. # limitations under the License.
from __future__ import print_function from __future__ import print_function
import six
from six.moves import reduce from six.moves import reduce
from ..layer_helper import LayerHelper from ..layer_helper import LayerHelper
from ..param_attr import ParamAttr from ..param_attr import ParamAttr
from ..initializer import Initializer
from ..framework import convert_np_dtype_to_dtype_, in_dygraph_mode, _varbase_creator from ..framework import convert_np_dtype_to_dtype_, in_dygraph_mode, _varbase_creator
from ..framework import Variable from ..framework import Variable
from ..initializer import Constant from ..initializer import Constant
...@@ -101,10 +103,30 @@ def create_parameter(shape, ...@@ -101,10 +103,30 @@ def create_parameter(shape,
import paddle.fluid.layers as layers import paddle.fluid.layers as layers
W = layers.create_parameter(shape=[784, 200], dtype='float32') W = layers.create_parameter(shape=[784, 200], dtype='float32')
""" """
check_type(shape, 'shape', (list, tuple, numpy.ndarray), 'create_parameter')
for item in shape:
if six.PY2:
check_type(item, 'item of shape',
(int, long, numpy.uint8, numpy.int8, numpy.int16,
numpy.int32, numpy.int64), 'create_parameter')
else:
check_type(item, 'item of shape',
(int, numpy.uint8, numpy.int8, numpy.int16, numpy.int32,
numpy.int64), 'create_parameter')
check_dtype(dtype, 'dtype', [
'bool', 'float16', 'float32', 'float64', 'int8', 'int16', 'int32',
'int64', 'uint8'
], 'create_parameter')
check_type(attr, 'attr', (type(None), ParamAttr), 'create_parameter')
check_type(default_initializer, 'default_initializer',
(type(None), Initializer), 'create_parameter')
helper = LayerHelper("create_parameter", **locals()) helper = LayerHelper("create_parameter", **locals())
if attr is None: if attr is None:
attr = ParamAttr(name=name) attr = ParamAttr(name=name)
return helper.create_parameter(attr, shape, dtype, is_bias, return helper.create_parameter(attr, shape,
convert_dtype(dtype), is_bias,
default_initializer) default_initializer)
...@@ -140,6 +162,23 @@ def create_global_var(shape, ...@@ -140,6 +162,23 @@ def create_global_var(shape,
var = layers.create_global_var(shape=[2,3], value=1.0, dtype='float32', var = layers.create_global_var(shape=[2,3], value=1.0, dtype='float32',
persistable=True, force_cpu=True, name='new_var') persistable=True, force_cpu=True, name='new_var')
""" """
check_type(shape, 'shape', (list, tuple, numpy.ndarray),
'create_global_var')
for item in shape:
if six.PY2:
check_type(item, 'item of shape',
(int, long, numpy.uint8, numpy.int8, numpy.int16,
numpy.int32, numpy.int64), 'create_global_var')
else:
check_type(item, 'item of shape',
(int, numpy.uint8, numpy.int8, numpy.int16, numpy.int32,
numpy.int64), 'create_global_var')
check_dtype(dtype, 'dtype', [
'bool', 'float16', 'float32', 'float64', 'int8', 'int16', 'int32',
'int64', 'uint8'
], 'create_global_var')
helper = LayerHelper("global_var", **locals()) helper = LayerHelper("global_var", **locals())
var = helper.create_global_variable( var = helper.create_global_variable(
dtype=dtype, dtype=dtype,
......
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os, shutil
import unittest
import numpy as np
import paddle.fluid as fluid
from paddle.fluid import Program, program_guard
class TestCreateGlobalVarError(unittest.TestCase):
    """Verify that create_global_var rejects ill-typed arguments."""

    def test_errors(self):
        with program_guard(Program(), Program()):
            # shape must be a list/tuple/ndarray, not a bare int.
            with self.assertRaises(TypeError):
                fluid.layers.create_global_var(1, 2.0, np.float32)

            # Every item of shape must be an integer type.
            with self.assertRaises(TypeError):
                fluid.layers.create_global_var([1.0, 2.0, 3.0], 2.0, 'float32')

            # create_global_var supports every dtype handled by
            # convert_dtype(), so an unsupported dtype surfaces as a
            # ValueError rather than a TypeError.
            with self.assertRaises(ValueError):
                fluid.layers.create_global_var([1, 2, 3], 2.0, np.complex128)
# Allow this test module to be executed directly from the command line.
if __name__ == "__main__":
    unittest.main()
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os, shutil
import unittest
import numpy as np
import paddle.fluid as fluid
from paddle.fluid import Program, program_guard
from paddle.fluid import ParamAttr, initializer
class TestCreateParameterError(unittest.TestCase):
    """Verify that create_parameter rejects ill-typed arguments."""

    def test_errors(self):
        with program_guard(Program(), Program()):
            # shape must be a list/tuple/ndarray, not a bare int.
            with self.assertRaises(TypeError):
                fluid.layers.create_parameter(1, np.float32)

            # Every item of shape must be an integer type.
            with self.assertRaises(TypeError):
                fluid.layers.create_parameter([1.0, 2.0, 3.0], "float32")

            # attr must be None or a ParamAttr, not an ndarray.
            with self.assertRaises(TypeError):
                fluid.layers.create_parameter(
                    [1, 2, 3], np.float32, attr=np.arange(6))

            # default_initializer must be None or an Initializer instance.
            with self.assertRaises(TypeError):
                fluid.layers.create_parameter(
                    [1, 2, 3],
                    np.float32,
                    default_initializer=np.arange(6))
# Allow this test module to be executed directly from the command line.
if __name__ == "__main__":
    unittest.main()
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册