Unverified commit 00d401ec, authored by zhulei, committed via GitHub

Add api of constant in paddle.nn.initializer (#27786)

* Add api of constant in paddle.nn.initializer

* Add api of constant in paddle.nn.initializer

* Add api of constant in paddle.nn.initializer

* Add api of constant in paddle.nn.initializer

* Add api of constant in paddle.nn.initializer

* Add api of constant in paddle.nn.initializer

* Add api of constant in paddle.nn.initializer
Parent commit: b8d2a021
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import numpy as np
import unittest
import paddle
import paddle.nn as nn
import paddle.fluid as fluid
import paddle.fluid.framework as framework
import paddle.nn.initializer as initializer
from paddle.fluid.core import VarDesc
DELTA = 0.00001
def check_cast_op(op):
    """Return True iff *op* is a cast operator converting fp32 to fp16."""
    if op.type != 'cast':
        return False
    from_fp32 = op.attr('in_dtype') == VarDesc.VarType.FP32
    to_fp16 = op.attr('out_dtype') == VarDesc.VarType.FP16
    return from_fp32 and to_fp16
class TestConstantInitializer(unittest.TestCase):
    """Tests for paddle.nn.initializer.Constant in static and dygraph modes."""

    def static_test_constant_initializer_common(self,
                                                init_inst,
                                                dtype="float32",
                                                value_target=0.0):
        """Create parameters in a static program and verify that the emitted
        ``fill_constant`` op carries ``value_target``.

        Returns the global block so callers can inspect trailing ops
        (e.g. the fp32->fp16 cast op).
        """
        paddle.enable_static()
        prog = framework.Program()
        global_block = prog.global_block()
        # Create the same named parameter twice: initialization ops must not
        # be duplicated for an already-existing parameter.
        for _ in range(2):
            global_block.create_parameter(
                dtype=dtype,
                shape=[5, 10],
                lod_level=0,
                name="param",
                initializer=init_inst)
        # float16 params are filled in fp32 and then cast, hence one extra op.
        if dtype == "float16":
            expected_num_ops = 2
        else:
            expected_num_ops = 1
        self.assertEqual(len(global_block.ops), expected_num_ops)
        fill_op = global_block.ops[0]
        self.assertEqual(fill_op.type, 'fill_constant')
        self.assertAlmostEqual(fill_op.attr('value'), value_target, delta=DELTA)
        paddle.disable_static()
        return global_block

    def test_constant_initializer_default_value_static(self, dtype="float32"):
        """Test the constant initializer with default value in static graph
        """
        return self.static_test_constant_initializer_common(
            init_inst=initializer.Constant(), dtype=dtype, value_target=0.0)

    def test_constant_initializer_default_value_dygraph(self, dtype="float32"):
        """Test constant initializer with supplied value in dygraph
        """
        with fluid.dygraph.guard():
            linear = nn.Linear(2, 4, weight_attr=nn.initializer.Constant())
            expected = np.ones((2, 4), dtype=dtype) * 0.0
            actual = linear.weight.numpy()
            diff = expected - actual
            # Sum of squared errors should vanish when weights match.
            self.assertAlmostEqual(np.sum(diff * diff), 0.0, delta=DELTA)

    def test_constant_initializer_static(self, dtype="float32"):
        """Test constant initializer with supplied value in static graph
        """
        return self.static_test_constant_initializer_common(
            init_inst=initializer.Constant(2.3), dtype=dtype, value_target=2.3)

    def test_constant_initializer_dygraph(self, dtype="float32"):
        """Test constant initializer with supplied value in dygraph
        """
        with fluid.dygraph.guard():
            linear = nn.Linear(
                2, 4, weight_attr=nn.initializer.Constant(value=2.0))
            expected = np.ones((2, 4), dtype=dtype) * 2.0
            actual = linear.weight.numpy()
            diff = expected - actual
            self.assertAlmostEqual(np.sum(diff * diff), 0.0, delta=DELTA)

    def test_constant_initializer_fp16(self):
        """Test constant initializer with float16
        """
        # Static graph: the second op must be the fp32->fp16 cast.
        block = self.test_constant_initializer_default_value_static("float16")
        self.assertTrue(check_cast_op(block.ops[1]))
        block = self.test_constant_initializer_static("float16")
        self.assertTrue(check_cast_op(block.ops[1]))
        # Dygraph: just verify values come out right for fp16 targets.
        self.test_constant_initializer_default_value_dygraph("float16")
        self.test_constant_initializer_dygraph("float16")
# Run the test suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
@@ -13,21 +13,23 @@ (diff of python/paddle/nn/initializer/__init__.py; resulting version shown below)
# limitations under the License.

# TODO: define the initializers to create a Parameter in neural network
from ...fluid.initializer import Bilinear  #DEFINE_ALIAS
from ...fluid.initializer import MSRA  #DEFINE_ALIAS
from ...fluid.initializer import Normal  #DEFINE_ALIAS
from ...fluid.initializer import TruncatedNormal  #DEFINE_ALIAS
from ...fluid.initializer import Uniform  #DEFINE_ALIAS
from ...fluid.initializer import Xavier  #DEFINE_ALIAS

from . import constant
from .constant import Constant  #DEFINE_ALIAS

__all__ = [
    'Bilinear',
    'MSRA',
    'Normal',
    'TruncatedNormal',
    'Uniform',
    'Xavier',
]

__all__ += constant.__all__

(Removed in this diff: `from ...fluid.initializer import Constant  #DEFINE_ALIAS` and the `'Constant',` entry in `__all__`; `Constant` is now re-exported from the new `constant` submodule.)
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TODO: define the initializers of Constant in neural network
from ...fluid.initializer import ConstantInitializer
__all__ = ['Constant']
class Constant(ConstantInitializer):
    """Implement the constant initializer.

    Args:
        value (float32): constant value to initialize the parameter.
            Defaults to 0.0.

    Raises:
        ValueError: If ``value`` is explicitly passed as None.

    Examples:
        .. code-block:: python

            import paddle
            import paddle.nn as nn

            data = paddle.rand([30, 10, 2], dtype='float32')
            linear = nn.Linear(2,
                               4,
                               weight_attr=nn.initializer.Constant(value=2.0))
            res = linear(data)
            print(linear.weight.numpy())
            #result is [[2. 2. 2. 2.],[2. 2. 2. 2.]]
    """

    def __init__(self, value=0.0):
        # Reject None early with a clear message; a None value would
        # otherwise fail much later inside the fill_constant op.
        if value is None:
            raise ValueError("value must not be None.")
        super(Constant, self).__init__(value=value, force_cpu=False)
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
To comment, please register or sign in.