diff --git a/python/paddle/fluid/tests/unittests/test_initializer_nn.py b/python/paddle/fluid/tests/unittests/test_initializer_nn.py new file mode 100644 index 0000000000000000000000000000000000000000..6ad19658fd20376cbdd1d370fd0268c163183f4f --- /dev/null +++ b/python/paddle/fluid/tests/unittests/test_initializer_nn.py @@ -0,0 +1,108 @@ +# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import numpy as np +import unittest + +import paddle +import paddle.nn as nn +import paddle.fluid as fluid +import paddle.fluid.framework as framework +import paddle.nn.initializer as initializer +from paddle.fluid.core import VarDesc + +DELTA = 0.00001 + + +def check_cast_op(op): + return op.type == 'cast' and \ + op.attr('in_dtype') == VarDesc.VarType.FP32 and \ + op.attr('out_dtype') == VarDesc.VarType.FP16 + + +class TestConstantInitializer(unittest.TestCase): + def static_test_constant_initializer_common(self, + init_inst, + dtype="float32", + value_target=0.0): + paddle.enable_static() + program = framework.Program() + block = program.global_block() + for _ in range(2): + block.create_parameter( + dtype=dtype, + shape=[5, 10], + lod_level=0, + name="param", + initializer=init_inst) + num_ops = 2 if dtype == "float16" else 1 + self.assertEqual(len(block.ops), num_ops) + init_op = block.ops[0] + self.assertEqual(init_op.type, 'fill_constant') + 
self.assertAlmostEqual(init_op.attr('value'), value_target, delta=DELTA) + paddle.disable_static() + return block + + def test_constant_initializer_default_value_static(self, dtype="float32"): + """Test the constant initializer with default value in static graph + """ + block = self.static_test_constant_initializer_common( + init_inst=initializer.Constant(), dtype=dtype, value_target=0.0) + return block + + def test_constant_initializer_default_value_dygraph(self, dtype="float32"): + """Test constant initializer with supplied value in dygraph + """ + with fluid.dygraph.guard(): + linear = nn.Linear(2, 4, weight_attr=nn.initializer.Constant()) + mat_target = np.ones((2, 4), dtype=dtype) * 0.0 + mat_linear = linear.weight.numpy() + mismatch = np.sum( + (mat_target - mat_linear) * (mat_target - mat_linear)) + self.assertAlmostEqual(mismatch, 0.0, delta=DELTA) + + def test_constant_initializer_static(self, dtype="float32"): + """Test constant initializer with supplied value in static graph + """ + block = self.static_test_constant_initializer_common( + init_inst=initializer.Constant(2.3), dtype=dtype, value_target=2.3) + return block + + def test_constant_initializer_dygraph(self, dtype="float32"): + """Test constant initializer with supplied value in dygraph + """ + with fluid.dygraph.guard(): + linear = nn.Linear( + 2, 4, weight_attr=nn.initializer.Constant(value=2.0)) + mat_target = np.ones((2, 4), dtype=dtype) * 2.0 + mat_linear = linear.weight.numpy() + mismatch = np.sum( + (mat_target - mat_linear) * (mat_target - mat_linear)) + self.assertAlmostEqual(mismatch, 0.0, delta=DELTA) + + def test_constant_initializer_fp16(self): + """Test constant initializer with float16 + """ + block = self.test_constant_initializer_default_value_static("float16") + self.assertTrue(check_cast_op(block.ops[1])) + block = self.test_constant_initializer_static("float16") + self.assertTrue(check_cast_op(block.ops[1])) + self.test_constant_initializer_default_value_dygraph("float16") + 
self.test_constant_initializer_dygraph("float16") + + +if __name__ == '__main__': + unittest.main() diff --git a/python/paddle/nn/initializer/__init__.py b/python/paddle/nn/initializer/__init__.py index 489f324868a3ed345c021ae8d78285266cacafb1..db0f5dbff2b80bfb1db95bdeed20f937dc8b242a 100644 --- a/python/paddle/nn/initializer/__init__.py +++ b/python/paddle/nn/initializer/__init__.py @@ -13,21 +13,23 @@ # limitations under the License. # TODO: define the initializers to create a Parameter in neural network - from ...fluid.initializer import Bilinear #DEFINE_ALIAS -from ...fluid.initializer import Constant #DEFINE_ALIAS from ...fluid.initializer import MSRA #DEFINE_ALIAS from ...fluid.initializer import Normal #DEFINE_ALIAS from ...fluid.initializer import TruncatedNormal #DEFINE_ALIAS from ...fluid.initializer import Uniform #DEFINE_ALIAS from ...fluid.initializer import Xavier #DEFINE_ALIAS +from . import constant +from .constant import Constant #DEFINE_ALIAS + __all__ = [ 'Bilinear', - 'Constant', 'MSRA', 'Normal', 'TruncatedNormal', 'Uniform', 'Xavier', ] + +__all__ += constant.__all__ diff --git a/python/paddle/nn/initializer/constant.py b/python/paddle/nn/initializer/constant.py new file mode 100644 index 0000000000000000000000000000000000000000..6d21ddae0d16b5003bc6766b4106dd937727c2b1 --- /dev/null +++ b/python/paddle/nn/initializer/constant.py @@ -0,0 +1,46 @@ +# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and
# limitations under the License.

# TODO: define the initializers of Constant in neural network
from ...fluid.initializer import ConstantInitializer

__all__ = ['Constant']


class Constant(ConstantInitializer):
    """Implement the constant initializer.

    Args:
        value (float32): constant value to initialize the parameter.
            Default is 0.0.

    Raises:
        ValueError: if ``value`` is None.

    Examples:
        .. code-block:: python

            import paddle
            import paddle.nn as nn

            data = paddle.rand([30, 10, 2], dtype='float32')
            linear = nn.Linear(2,
                               4,
                               weight_attr=nn.initializer.Constant(value=2.0))
            res = linear(data)
            print(linear.weight.numpy())
            #result is [[2. 2. 2. 2.],[2. 2. 2. 2.]]

    """

    def __init__(self, value=0.0):
        # Reject an explicit None here, close to the call site, instead of
        # letting it fail deep inside the fill_constant op later.
        if value is None:
            raise ValueError("value must not be None.")
        # Delegate to the fluid implementation; force_cpu=False keeps the
        # fill on the parameter's own place.
        super(Constant, self).__init__(value=value, force_cpu=False)