def apply(self, fn):
    """
    Applies ``fn`` to every sublayer (as returned by ``.sublayers()``)
    as well as to self. Typical use includes initializing the parameters
    of a model.

    Parameters:
        fn (function): a function to be applied to each sublayer

    Returns:
        Layer: self

    Example::
        .. code-block:: python

            import paddle
            import paddle.nn as nn

            paddle.enable_imperative()

            net = nn.Sequential(nn.Linear(2, 2), nn.Linear(2, 2))

            def init_weights(layer):
                if type(layer) == nn.Linear:
                    print('before init weight:', layer.weight.numpy())
                    new_weight = paddle.fill_constant(layer.weight.shape, layer.weight.dtype, value=0.9)
                    layer.weight.set_value(new_weight)
                    print('after init weight:', layer.weight.numpy())

            net.apply(init_weights)

            print(net.state_dict())
    """
    # NOTE: ``sublayers()`` already returns sublayers *recursively* (its
    # default is include_sublayers=True), so apply ``fn`` exactly once per
    # layer here.  Recursing via ``layer.apply(fn)`` on top of the
    # recursive listing would invoke ``fn`` multiple times on deeply
    # nested layers (once per ancestor on the path to the root).
    for layer in self.sublayers():
        fn(layer)

    # Apply to self last, mirroring the depth-first order of sublayers().
    fn(self)

    # Return self so calls can be chained (net.apply(fn).train(), ...).
    return self
+ +from __future__ import print_function + +import unittest + +import paddle +import paddle.nn as nn +import paddle.fluid as fluid + +import numpy as np + + +class LeNetDygraph(fluid.dygraph.Layer): + def __init__(self, num_classes=10, classifier_activation='softmax'): + super(LeNetDygraph, self).__init__() + self.num_classes = num_classes + self.features = nn.Sequential( + nn.Conv2D( + 1, 6, 3, stride=1, padding=1), + nn.ReLU(), + nn.Pool2D(2, 'max', 2), + nn.Conv2D( + 6, 16, 5, stride=1, padding=0), + nn.ReLU(), + nn.Pool2D(2, 'max', 2)) + + if num_classes > 0: + self.fc = nn.Sequential( + nn.Linear(400, 120), + nn.Linear(120, 84), + nn.Linear( + 84, 10, act=classifier_activation)) + + def forward(self, inputs): + x = self.features(inputs) + + if self.num_classes > 0: + x = fluid.layers.flatten(x, 1) + x = self.fc(x) + return x + + +def init_weights(layer): + if type(layer) == nn.Linear: + new_weight = paddle.fill_constant( + layer.weight.shape, layer.weight.dtype, value=0.9) + layer.weight.set_value(new_weight) + new_bias = paddle.fill_constant( + layer.bias.shape, layer.bias.dtype, value=-0.1) + layer.bias.set_value(new_bias) + elif type(layer) == nn.Conv2D: + new_weight = paddle.fill_constant( + layer.weight.shape, layer.weight.dtype, value=0.7) + layer.weight.set_value(new_weight) + new_bias = paddle.fill_constant( + layer.bias.shape, layer.bias.dtype, value=-0.2) + layer.bias.set_value(new_bias) + + +class TestLayerApply(unittest.TestCase): + def test_apply_init_weight(self): + with fluid.dygraph.guard(): + net = LeNetDygraph() + + net.apply(init_weights) + + for layer in net.sublayers(): + if type(layer) == nn.Linear: + np.testing.assert_allclose(layer.weight.numpy(), 0.9) + np.testing.assert_allclose(layer.bias.numpy(), -0.1) + elif type(layer) == nn.Conv2D: + np.testing.assert_allclose(layer.weight.numpy(), 0.7) + np.testing.assert_allclose(layer.bias.numpy(), -0.2) + + +if __name__ == '__main__': + unittest.main() -- GitLab