From de46b159519dcd9d2db3875dd7d47bfc545ac209 Mon Sep 17 00:00:00 2001
From: lilong12
Date: Thu, 5 Dec 2019 14:41:46 +0800
Subject: [PATCH] Unify the rank of prelu alpha to 4, corresponding to [N, C, H, W], except for the all mode

---
 python/paddle/fluid/layers/nn.py                   |  2 +-
 python/paddle/fluid/tests/unittests/test_layers.py | 11 ++++++-----
 2 files changed, 7 insertions(+), 6 deletions(-)

diff --git a/python/paddle/fluid/layers/nn.py b/python/paddle/fluid/layers/nn.py
index dab6e53ca4d..53974cb6763 100644
--- a/python/paddle/fluid/layers/nn.py
+++ b/python/paddle/fluid/layers/nn.py
@@ -8693,7 +8693,7 @@ def prelu(x, mode, param_attr=None, name=None):
     if mode == 'channel':
         alpha_shape = [1, x.shape[1], 1, 1]
     elif mode == 'element':
-        alpha_shape = x.shape[1:]
+        alpha_shape = [1, x.shape[1], x.shape[2], x.shape[3]]
     dtype = helper.input_dtype(input_param_name='x')
     alpha = helper.create_parameter(
         attr=helper.param_attr,
diff --git a/python/paddle/fluid/tests/unittests/test_layers.py b/python/paddle/fluid/tests/unittests/test_layers.py
index ff267698456..18132fc0ed1 100644
--- a/python/paddle/fluid/tests/unittests/test_layers.py
+++ b/python/paddle/fluid/tests/unittests/test_layers.py
@@ -712,7 +712,7 @@ class TestLayer(LayerTest):
             self.assertTrue(
                 np.array_equal(btp1.bias.numpy(), btp2.bias.numpy()))
 
-    def test_prelu(self):
+    def prelu_test(self, mode):
         inp_np = np.ones([5, 200, 100, 100]).astype('float32')
         with self.static_graph():
             data_t = layers.data(
@@ -720,7 +720,6 @@
                 shape=[5, 200, 100, 100],
                 dtype="float32",
                 append_batch_size=False)
-            mode = 'channel'
             out = layers.prelu(
                 data_t, mode, param_attr=ParamAttr(initializer=Constant(1.0)))
             static_rlt = self.get_static_graph_result(
@@ -732,7 +731,6 @@
                 shape=[5, 200, 100, 100],
                 dtype="float32",
                 append_batch_size=False)
-            mode = 'channel'
             prelu = nn.PRelu(
                 'prelu',
                 mode=mode,
@@ -742,7 +740,6 @@
             feed={"input": inp_np}, fetch_list=[out])[0]
 
         with self.dynamic_graph():
-            mode = 'channel'
             prelu = nn.PRelu(
                 'prelu',
                 mode=mode,
@@ -756,7 +753,6 @@
         with self.dynamic_graph():
             inp_np = np.random.randn(5, 200, 100, 100).astype("float32")
             inp = base.to_variable(inp_np)
-            mode = 'channel'
             prelu1 = nn.PRelu(
                 'prelu1',
                 mode=mode,
@@ -779,6 +775,11 @@
             self.assertTrue(
                 np.array_equal(prelu1.weight.numpy(), prelu2.weight.numpy()))
 
+    def test_prelu(self):
+        self.prelu_test("channel")
+        self.prelu_test("element")
+        self.prelu_test("all")
+
     def test_embeding(self):
         inp_word = np.array([[[1]]]).astype('int64')
         dict_size = 20
-- 
GitLab
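
Note (not part of the patch): a minimal NumPy sketch of the shape convention this change unifies. `prelu_ref` is a hypothetical reference helper, not Paddle API; the 0.25 init value is illustrative only. After the patch, 'channel' and 'element' modes both create a rank-4 alpha that broadcasts against an [N, C, H, W] input, while 'all' keeps a single shared slope.

    import numpy as np

    def prelu_ref(x, mode):
        # x: [N, C, H, W]; alpha shapes mirror the patched layer code.
        if mode == 'all':
            alpha_shape = [1]  # a single slope shared by every element
        elif mode == 'channel':
            alpha_shape = [1, x.shape[1], 1, 1]  # one slope per channel
        else:  # 'element'
            # Pre-patch this was x.shape[1:] (rank 3); the patch makes it rank 4.
            alpha_shape = [1, x.shape[1], x.shape[2], x.shape[3]]
        alpha = np.full(alpha_shape, 0.25, dtype=x.dtype)  # illustrative init
        # PReLU: pass positives through, scale negatives by the broadcast alpha.
        return np.where(x > 0, x, alpha * x)

    x = np.random.randn(2, 3, 4, 4).astype('float32')
    for m in ('all', 'channel', 'element'):
        print(m, prelu_ref(x, m).shape)

The old rank-3 shape x.shape[1:] already broadcast the same way over the batch dimension; the explicit leading 1 just gives 'element' mode the same rank-4 layout as 'channel', so downstream code can treat both uniformly.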