未验证 提交 ab471584 编写于 作者: W Wu Yi 提交者: GitHub

fix default create_parameter dtype matching initializers (#15521)

* fix default create_parameter dtype matching initializers test=develop

* update type check test=develop

* update test=develop
上级 67e4450c
...@@ -300,6 +300,17 @@ class LayerHelper(object): ...@@ -300,6 +300,17 @@ class LayerHelper(object):
attr.name = unique_name.generate(".".join([self.name, suffix])) attr.name = unique_name.generate(".".join([self.name, suffix]))
if default_initializer is None and attr.initializer is None: if default_initializer is None and attr.initializer is None:
if isinstance(dtype, core.VarDesc.VarType):
if dtype != core.VarDesc.VarType.FP32 and \
dtype != core.VarDesc.VarType.FP64:
raise TypeError(
"Can not create parameter with default initializer when dtype is not float type. Set default_initializer to fit the parameter dtype!"
)
else:
if not (dtype.startswith("float") or dtype == "double"):
raise TypeError(
"Can not create parameter with default initializer when dtype is not float type. Set default_initializer to fit the parameter dtype!"
)
if is_bias: if is_bias:
attr._set_default_bias_initializer() attr._set_default_bias_initializer()
else: else:
......
...@@ -58,7 +58,8 @@ class TestBook(unittest.TestCase): ...@@ -58,7 +58,8 @@ class TestBook(unittest.TestCase):
def test_simple_conv2d(self): def test_simple_conv2d(self):
program = Program() program = Program()
with program_guard(program, startup_program=Program()): with program_guard(program, startup_program=Program()):
images = layers.data(name='pixel', shape=[3, 48, 48], dtype='int32') images = layers.data(
name='pixel', shape=[3, 48, 48], dtype='float32')
layers.conv2d(input=images, num_filters=3, filter_size=[4, 4]) layers.conv2d(input=images, num_filters=3, filter_size=[4, 4])
print(str(program)) print(str(program))
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册