From 1ae3f47f599ce6709fd6c23ef4b3fd75c8ad3c4c Mon Sep 17 00:00:00 2001
From: songyouwei
Date: Thu, 9 Jan 2020 10:54:46 +0800
Subject: [PATCH] update PRelu arg (#21946)

* update PRelu arg
test=develop

* fix unittests
test=develop

* fix element mode alpha shape
test=develop

* split channel_or_input_shape arg
test=develop

* fix unittest
test=develop
---
 python/paddle/fluid/dygraph/nn.py              | 52 +++++++++++++------
 .../test_imperative_load_static_param.py       |  4 +-
 .../fluid/tests/unittests/test_layers.py       |  4 ++
 3 files changed, 42 insertions(+), 18 deletions(-)

diff --git a/python/paddle/fluid/dygraph/nn.py b/python/paddle/fluid/dygraph/nn.py
index b38e405ff62..5c28617b6e7 100644
--- a/python/paddle/fluid/dygraph/nn.py
+++ b/python/paddle/fluid/dygraph/nn.py
@@ -1898,8 +1898,12 @@ class PRelu(layers.Layer):
           and element. all: all elements share same weight
           channel:elements in a channel share same weight
           element:each element has a weight
+        channel (int, optional): The number of channels.
+            This argument is required when mode is "channel".
+            Default: None.
         input_shape (list or tuple, optional): The shape of input.
-            This parameter is required when mode is not "all". Default: None.
+            This argument is required when mode is "element".
+            Default: None.
         param_attr(ParamAttr, optional): The parameter attribute for the learnable
             weight (alpha). Default: None.
         dtype (str, optional): Data type, it can be "float32" or "float64". Default: "float32".
@@ -1921,31 +1925,47 @@ class PRelu(layers.Layer):
           inp_np = np.ones([5, 200, 100, 100]).astype('float32')
           with fluid.dygraph.guard():
               inp_np = to_variable(inp_np)
-              mode = 'channel'
-              prelu = fluid.PRelu(
-                 mode=mode,
+              prelu0 = fluid.PRelu(
+                 mode='all',
+                 param_attr=fluid.ParamAttr(initializer=fluid.initializer.Constant(1.0)))
+              dy_rlt0 = prelu0(inp_np)
+              prelu1 = fluid.PRelu(
+                 mode='channel',
+                 channel=200,
+                 param_attr=fluid.ParamAttr(initializer=fluid.initializer.Constant(1.0)))
+              dy_rlt1 = prelu1(inp_np)
+              prelu2 = fluid.PRelu(
+                 mode='element',
                  input_shape=inp_np.shape,
                  param_attr=fluid.ParamAttr(initializer=fluid.initializer.Constant(1.0)))
-              dy_rlt = prelu(inp_np)
+              dy_rlt2 = prelu2(inp_np)

     """

-    def __init__(self, mode, input_shape=None, param_attr=None,
+    def __init__(self,
+                 mode,
+                 channel=None,
+                 input_shape=None,
+                 param_attr=None,
                  dtype='float32'):
         super(PRelu, self).__init__()
         self._mode = mode
         self._param_attr = param_attr
-        if self._mode not in ['all', 'channel', 'element']:
-            raise ValueError('mode should be one of all, channel, element.')
         self._dtype = dtype
-        self._alpha_shape = [1]
-        if mode is not 'all':
-            assert input_shape is not None
-            input_shape = list(input_shape)
-            if self._mode == 'channel':
-                self._alpha_shape = [1, input_shape[1], 1, 1]
-            elif self._mode == 'element':
-                self._alpha_shape = input_shape
+        if mode == 'all':
+            self._alpha_shape = [1]
+        elif mode == 'channel':
+            assert isinstance(
+                channel,
+                int), "channel argument is required when mode is 'channel'."
+            self._alpha_shape = [1, channel, 1, 1]
+        elif mode == 'element':
+            assert isinstance(input_shape, (
+                list, tuple
+            )), "input_shape argument is required when mode is 'element'."
+            self._alpha_shape = [1] + list(input_shape)[1:]
+        else:
+            raise ValueError('mode should be one of all, channel, element.')
         self.weight = self.create_parameter(
             attr=self._param_attr,
             shape=self._alpha_shape,
diff --git a/python/paddle/fluid/tests/unittests/test_imperative_load_static_param.py b/python/paddle/fluid/tests/unittests/test_imperative_load_static_param.py
index d25e3a76cb7..30bf44d88b7 100644
--- a/python/paddle/fluid/tests/unittests/test_imperative_load_static_param.py
+++ b/python/paddle/fluid/tests/unittests/test_imperative_load_static_param.py
@@ -179,8 +179,8 @@ class TestDygraphLoadStatic(unittest.TestCase):
                 self.nce1 = NCE(10000, 100)
                 self.nce2 = NCE(10000, 100)

-                self.prelu1 = PRelu("channel", [-1, 5, 10, 10])
-                self.prelu2 = PRelu("channel", [-1, 5, 10, 10])
+                self.prelu1 = PRelu("channel", channel=5)
+                self.prelu2 = PRelu("channel", channel=5)

                 self.group_norm1 = GroupNorm(8, 4)
                 self.gourp_norm2 = GroupNorm(8, 4)
diff --git a/python/paddle/fluid/tests/unittests/test_layers.py b/python/paddle/fluid/tests/unittests/test_layers.py
index fa345be1ff0..cde4264ff14 100644
--- a/python/paddle/fluid/tests/unittests/test_layers.py
+++ b/python/paddle/fluid/tests/unittests/test_layers.py
@@ -676,6 +676,7 @@ class TestLayer(LayerTest):
                 append_batch_size=False)
             prelu = nn.PRelu(
                 mode=mode,
+                channel=inp_np.shape[1],
                 input_shape=data_t.shape,
                 param_attr=ParamAttr(initializer=Constant(1.0)))
             out = prelu(data_t)
@@ -685,6 +686,7 @@ class TestLayer(LayerTest):
         with self.dynamic_graph():
             prelu = nn.PRelu(
                 mode=mode,
+                channel=inp_np.shape[1],
                 input_shape=inp_np.shape,
                 param_attr=ParamAttr(initializer=Constant(1.0)))
             dy_rlt = prelu(base.to_variable(inp_np))
@@ -698,10 +700,12 @@ class TestLayer(LayerTest):
             inp = base.to_variable(inp_np)
             prelu1 = nn.PRelu(
                 mode=mode,
+                channel=inp_np.shape[1],
                 input_shape=inp_np.shape,
                 param_attr=ParamAttr(initializer=Constant(2.0)))
             prelu2 = nn.PRelu(
                 mode=mode,
+                channel=inp_np.shape[1],
                 input_shape=inp_np.shape,
                 param_attr=ParamAttr(initializer=Constant(1.0)))
             dy_rlt1 = prelu1(inp)
--
GitLab
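A minimal usage sketch (not part of the patch): it exercises the updated constructor based only on the new docstring and __init__ logic above, assuming the Paddle 1.x fluid dygraph API (fluid.PRelu, fluid.dygraph.guard) and that the layer's alpha parameter is exposed as .weight, as in the diff; the variable names are hypothetical.

import numpy as np
import paddle.fluid as fluid

inp = np.ones([5, 200, 100, 100]).astype('float32')
with fluid.dygraph.guard():
    x = fluid.dygraph.to_variable(inp)

    # 'channel' mode now takes the channel count directly instead of the full input shape.
    prelu_channel = fluid.PRelu(
        mode='channel',
        channel=200,
        param_attr=fluid.ParamAttr(initializer=fluid.initializer.Constant(1.0)))
    out_channel = prelu_channel(x)
    # alpha is created with shape [1, channel, 1, 1] by the new __init__ logic.
    assert list(prelu_channel.weight.shape) == [1, 200, 1, 1]

    # 'element' mode still takes input_shape; alpha now drops the batch
    # dimension, i.e. [1] + list(input_shape)[1:].
    prelu_element = fluid.PRelu(
        mode='element',
        input_shape=inp.shape,
        param_attr=fluid.ParamAttr(initializer=fluid.initializer.Constant(1.0)))
    out_element = prelu_element(x)
    assert list(prelu_element.weight.shape) == [1, 200, 100, 100]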