Commit 1ae3f47f authored by songyouwei, committed by hong

update PRelu arg (#21946)

* update PRelu arg
test=develop

* fix unittests
test=develop

* fix element mode alpha shape
test=develop

* split channel_or_input_shape arg
test=develop

* fix unittest
test=develop
Parent 8b29fec7
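In short, this PR splits the old catch-all input_shape argument: 'channel' mode now takes an explicit channel count, and input_shape is consulted only in 'element' mode ('all' needs neither). A minimal before/after sketch of a call site, reusing the shapes from the docstring example in the diff below:

    import numpy as np
    import paddle.fluid as fluid

    inp_np = np.ones([5, 200, 100, 100]).astype('float32')
    with fluid.dygraph.guard():
        inp = fluid.dygraph.to_variable(inp_np)
        # old signature: fluid.PRelu(mode='channel', input_shape=inp_np.shape)
        prelu = fluid.PRelu(
            mode='channel',
            channel=200,  # new: pass the channel count directly
            param_attr=fluid.ParamAttr(
                initializer=fluid.initializer.Constant(1.0)))
        out = prelu(inp)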
@@ -1898,8 +1898,12 @@ class PRelu(layers.Layer):
             and element. all: all elements share same weight
             channel:elements in a channel share same weight
             element:each element has a weight
+        channel (int, optional): The number of channels.
+            This argument is required when mode is "channel".
+            Default: None.
         input_shape (list or tuple, optional): The shape of input.
-            This parameter is required when mode is not "all". Default: None.
+            This argument is required when mode is "element".
+            Default: None.
         param_attr(ParamAttr, optional): The parameter attribute for the learnable
             weight (alpha). Default: None.
         dtype (str, optional): Data type, it can be "float32" or "float64". Default: "float32".
@@ -1921,31 +1925,47 @@ class PRelu(layers.Layer):
             inp_np = np.ones([5, 200, 100, 100]).astype('float32')
             with fluid.dygraph.guard():
                 inp_np = to_variable(inp_np)
-                mode = 'channel'
-                prelu = fluid.PRelu(
-                   mode=mode,
+                prelu0 = fluid.PRelu(
+                   mode='all',
+                   param_attr=fluid.ParamAttr(initializer=fluid.initializer.Constant(1.0)))
+                dy_rlt0 = prelu0(inp_np)
+                prelu1 = fluid.PRelu(
+                   mode='channel',
+                   channel=200,
+                   param_attr=fluid.ParamAttr(initializer=fluid.initializer.Constant(1.0)))
+                dy_rlt1 = prelu1(inp_np)
+                prelu2 = fluid.PRelu(
+                   mode='element',
                    input_shape=inp_np.shape,
                    param_attr=fluid.ParamAttr(initializer=fluid.initializer.Constant(1.0)))
-                dy_rlt = prelu(inp_np)
+                dy_rlt2 = prelu2(inp_np)
     """

-    def __init__(self, mode, input_shape=None, param_attr=None,
+    def __init__(self,
+                 mode,
+                 channel=None,
+                 input_shape=None,
+                 param_attr=None,
                  dtype='float32'):
         super(PRelu, self).__init__()
         self._mode = mode
         self._param_attr = param_attr
-        if self._mode not in ['all', 'channel', 'element']:
-            raise ValueError('mode should be one of all, channel, element.')
         self._dtype = dtype
-        self._alpha_shape = [1]
-        if mode is not 'all':
-            assert input_shape is not None
-            input_shape = list(input_shape)
-            if self._mode == 'channel':
-                self._alpha_shape = [1, input_shape[1], 1, 1]
-            elif self._mode == 'element':
-                self._alpha_shape = input_shape
+        if mode == 'all':
+            self._alpha_shape = [1]
+        elif mode == 'channel':
+            assert isinstance(
+                channel,
+                int), "channel argument is required when mode is 'channel'."
+            self._alpha_shape = [1, channel, 1, 1]
+        elif mode == 'element':
+            assert isinstance(input_shape, (
+                list, tuple
+            )), "input_shape argument is required when mode is 'element'."
+            self._alpha_shape = [1] + list(input_shape)[1:]
+        else:
+            raise ValueError('mode should be one of all, channel, element.')
         self.weight = self.create_parameter(
             attr=self._param_attr,
             shape=self._alpha_shape,
...
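For intuition, the three alpha shapes computed above ([1], [1, channel, 1, 1], and [1] + input_shape[1:]) are exactly the shapes that broadcast per-tensor, per-channel, and per-element against an NCHW input. A NumPy-only sketch of that broadcasting (a reference formula for PReLU, not Paddle code):

    import numpy as np

    def prelu_ref(x, alpha):
        # PReLU: positives pass through, negatives are scaled by alpha,
        # which NumPy broadcasts against x.
        return np.where(x > 0, x, alpha * x)

    # NCHW input, as in the docstring example (batch=5, channels=200).
    x = np.random.randn(5, 200, 100, 100).astype('float32')

    for shape in ([1],                    # mode='all'
                  [1, 200, 1, 1],         # mode='channel', channel=200
                  [1, 200, 100, 100]):    # mode='element': [1] + input_shape[1:]
        alpha = np.full(shape, 0.25, dtype='float32')
        assert prelu_ref(x, alpha).shape == x.shape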
@@ -179,8 +179,8 @@ class TestDygraphLoadStatic(unittest.TestCase):
             self.nce1 = NCE(10000, 100)
             self.nce2 = NCE(10000, 100)

-            self.prelu1 = PRelu("channel", [-1, 5, 10, 10])
-            self.prelu2 = PRelu("channel", [-1, 5, 10, 10])
+            self.prelu1 = PRelu("channel", channel=5)
+            self.prelu2 = PRelu("channel", channel=5)

             self.group_norm1 = GroupNorm(8, 4)
             self.gourp_norm2 = GroupNorm(8, 4)
...
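The two-line migration above is the general rule for existing 'channel'-mode callers: what used to be input_shape[1] becomes channel, and the rest of the shape is dropped:

    # before: full (batch-agnostic) shape passed positionally as input_shape
    # prelu = PRelu("channel", [-1, 5, 10, 10])
    # after: only the channel dimension (old input_shape[1]) is needed
    prelu = PRelu("channel", channel=5)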
@@ -676,6 +676,7 @@ class TestLayer(LayerTest):
                 append_batch_size=False)
             prelu = nn.PRelu(
                 mode=mode,
+                channel=inp_np.shape[1],
                 input_shape=data_t.shape,
                 param_attr=ParamAttr(initializer=Constant(1.0)))
             out = prelu(data_t)
@@ -685,6 +686,7 @@ class TestLayer(LayerTest):
         with self.dynamic_graph():
             prelu = nn.PRelu(
                 mode=mode,
+                channel=inp_np.shape[1],
                 input_shape=inp_np.shape,
                 param_attr=ParamAttr(initializer=Constant(1.0)))
             dy_rlt = prelu(base.to_variable(inp_np))
@@ -698,10 +700,12 @@ class TestLayer(LayerTest):
             inp = base.to_variable(inp_np)
             prelu1 = nn.PRelu(
                 mode=mode,
+                channel=inp_np.shape[1],
                 input_shape=inp_np.shape,
                 param_attr=ParamAttr(initializer=Constant(2.0)))
             prelu2 = nn.PRelu(
                 mode=mode,
+                channel=inp_np.shape[1],
                 input_shape=inp_np.shape,
                 param_attr=ParamAttr(initializer=Constant(1.0)))
             dy_rlt1 = prelu1(inp)
...