From fcb2ac6001e3095bf46277b89af57c1f8a8d675f Mon Sep 17 00:00:00 2001
From: huangjun12 <2399845970@qq.com>
Date: Fri, 4 Sep 2020 10:05:28 +0800
Subject: [PATCH] fix alpha dropout bug when p=1, test=develop (#26977)

---
 .../paddle/fluid/tests/unittests/test_dropout_op.py | 11 ++++++++++-
 python/paddle/nn/functional/common.py               |  2 ++
 2 files changed, 12 insertions(+), 1 deletion(-)

diff --git a/python/paddle/fluid/tests/unittests/test_dropout_op.py b/python/paddle/fluid/tests/unittests/test_dropout_op.py
index 97137e91aa7..7b9e25e1d4a 100644
--- a/python/paddle/fluid/tests/unittests/test_dropout_op.py
+++ b/python/paddle/fluid/tests/unittests/test_dropout_op.py
@@ -43,7 +43,7 @@ class TestDropoutOp(OpTest):
 class TestDropoutOpInput1d(OpTest):
     def setUp(self):
         self.op_type = "dropout"
-        self.inputs = {'X': np.random.random((2000)).astype("float32")}
+        self.inputs = {'X': np.random.random((2000, )).astype("float32")}
         self.attrs = {'dropout_prob': 0.0, 'fix_seed': True, 'is_test': False}
         self.outputs = {
             'Out': self.inputs['X'],
@@ -672,9 +672,11 @@ class TestAlphaDropoutFAPI(unittest.TestCase):
             res1 = paddle.nn.functional.alpha_dropout(x=input, p=0.)
             res2 = paddle.nn.functional.alpha_dropout(
                 x=input, p=0., training=False)
+            res3 = paddle.nn.functional.alpha_dropout(x=input, p=1.)
 
             in_np = np.random.random([40, 40]).astype("float32")
             res_np = in_np
+            res_np3 = np.zeros_like(in_np)
 
             exe = fluid.Executor(place)
             res_list = [res1, res2]
@@ -683,6 +685,10 @@ class TestAlphaDropoutFAPI(unittest.TestCase):
                                   feed={"input": in_np},
                                   fetch_list=[res])
                 self.assertTrue(np.allclose(fetches[0], res_np))
+            fetches = exe.run(fluid.default_main_program(),
+                              feed={"input": in_np},
+                              fetch_list=[res3])
+            self.assertTrue(np.allclose(fetches[0], res_np3))
 
     def test_static(self):
         for place in self.places:
@@ -693,15 +699,18 @@ class TestAlphaDropoutFAPI(unittest.TestCase):
         with fluid.dygraph.guard(place):
             in_np = np.random.random([40, 40]).astype("float32")
             res_np = in_np
+            res_np3 = np.zeros_like(in_np)
             input = fluid.dygraph.to_variable(in_np)
 
             res1 = paddle.nn.functional.alpha_dropout(x=input, p=0.)
             res2 = paddle.nn.functional.alpha_dropout(
                 x=input, p=0., training=False)
+            res3 = paddle.nn.functional.alpha_dropout(x=input, p=1.)
 
             res_list = [res1, res2]
             for res in res_list:
                 self.assertTrue(np.allclose(res.numpy(), res_np))
+            self.assertTrue(np.allclose(res3.numpy(), res_np3))
 
 
 class TestAlphaDropoutFAPIError(unittest.TestCase):
diff --git a/python/paddle/nn/functional/common.py b/python/paddle/nn/functional/common.py
index 633920dc7e8..ad84a32186e 100644
--- a/python/paddle/nn/functional/common.py
+++ b/python/paddle/nn/functional/common.py
@@ -1091,6 +1091,8 @@ def alpha_dropout(x, p=0.5, training=True, name=None):
                              'alpha_dropout')
 
     if training:
+        if p == 1:
+            return layers.scale(x, scale=0.)
         #get transformation params
         alpha = 1.6732632423543772848170429916717
         scale = 1.0507009873554804934193349852946
-- 
GitLab
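
A note on why p=1 needs this special case: alpha dropout (from the SELU paper) sets dropped units to alpha_p = -scale * alpha and then applies an affine transform a * x + b, where a = ((1 - p) * (1 + p * alpha_p**2))**-0.5 restores zero mean and unit variance. At p=1 the (1 - p) factor makes that expression a division by zero (a Python ZeroDivisionError, since a and b are computed on the host), instead of producing the expected all-zero output. The guard in common.py short-circuits that case; returning layers.scale(x, scale=0.) rather than a fresh zeros tensor presumably keeps the output tied to the input in the graph. The NumPy sketch below mirrors the same math to show where it breaks. It is illustrative only: the helper name alpha_dropout_np is made up for this note and is not Paddle API, while the SELU constants are the same values that appear in common.py above.

    import numpy as np

    def alpha_dropout_np(x, p=0.5, training=True, seed=None):
        """Illustrative NumPy version of the alpha-dropout math (hypothetical helper)."""
        if not training:
            return x
        if p == 1.:
            # Mirrors the patch: short-circuit before computing `a`, which
            # would otherwise divide by zero via the (1 - p) factor below.
            return np.zeros_like(x)
        # SELU constants, same values as in common.py above
        alpha = 1.6732632423543772848170429916717
        scale = 1.0507009873554804934193349852946
        alpha_p = -alpha * scale              # value assigned to dropped units
        # Affine params that restore zero mean / unit variance after dropping
        a = ((1 - p) * (1 + p * alpha_p**2))**-0.5
        b = -a * alpha_p * p
        rng = np.random.default_rng(seed)
        keep_mask = rng.random(x.shape) >= p  # keep each unit with probability 1 - p
        return a * np.where(keep_mask, x, alpha_p) + b

    x = np.random.random([40, 40]).astype("float32")
    assert np.allclose(alpha_dropout_np(x, p=0.), x)   # p=0: identity, as the tests expect
    assert np.allclose(alpha_dropout_np(x, p=1.), 0.)  # p=1: all zeros, matching res_np3

The two assertions correspond to the p=0 and p=1 cases exercised by TestAlphaDropoutFAPI above.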