diff --git a/paddle/phi/api/yaml/legacy_backward.yaml b/paddle/phi/api/yaml/legacy_backward.yaml
index b4c041fcd63d2cb5518098afbc98cedb77a80956..a172fa337616aa617e2bb3c114c43e8f200be83a 100755
--- a/paddle/phi/api/yaml/legacy_backward.yaml
+++ b/paddle/phi/api/yaml/legacy_backward.yaml
@@ -1151,6 +1151,17 @@
     data_type : x
   optional : boxes_num
 
+- backward_op : rrelu_grad
+  forward : rrelu (Tensor x, float lower, float upper, bool is_test) -> Tensor(out), Tensor(noise)
+  args : (Tensor x, Tensor noise, Tensor out_grad)
+  output : Tensor(x_grad)
+  infer_meta :
+    func : RReluGradInferMeta
+    param : [out_grad, noise]
+  kernel :
+    func : rrelu_grad
+    data_type : x
+
 - backward_op : scale_grad
   forward : scale (Tensor x, Scalar scale, float bias, bool bias_after_scale) -> Tensor(out)
   args : (Tensor out_grad, Scalar scale=1.0, bool bias_after_scale=true)
diff --git a/paddle/phi/api/yaml/legacy_ops.yaml b/paddle/phi/api/yaml/legacy_ops.yaml
index 07ab8ca455ec0fae3bd0d89fafa4eeeeae520ebd..2d61c8c976c2c68e4fda36d4bfa7f9c92acc0fc7 100755
--- a/paddle/phi/api/yaml/legacy_ops.yaml
+++ b/paddle/phi/api/yaml/legacy_ops.yaml
@@ -1537,6 +1537,17 @@
   intermediate : arg_max
   backward : roi_pool_grad
 
+- op : rrelu
+  args : (Tensor x, float lower, float upper, bool is_test)
+  output : Tensor(out), Tensor(noise)
+  infer_meta :
+    func : RReluInferMeta
+  kernel :
+    func : rrelu
+    data_type : x
+  intermediate : noise
+  backward : rrelu_grad
+
 - op : scale
   args : (Tensor x, Scalar scale, float bias, bool bias_after_scale)
   output : Tensor(out)
diff --git a/python/paddle/fluid/tests/unittests/test_rrelu_op.py b/python/paddle/fluid/tests/unittests/test_rrelu_op.py
index 96bccf8120257eb022322a31e8bf480729a3dd81..c7523a5f9b3ec96d897b4e63ea6736e3d9bf742b 100644
--- a/python/paddle/fluid/tests/unittests/test_rrelu_op.py
+++ b/python/paddle/fluid/tests/unittests/test_rrelu_op.py
@@ -311,6 +311,10 @@ class TestFunctionalRReluAPI(unittest.TestCase):
         self.assertRaises(ValueError, error_lower_upper)
 
 
+def rrelu(x, lower, upper, training):
+    return paddle.nn.functional.rrelu(x, lower, upper, training=not training)
+
+
 class RReluTest(OpTest):
     def setUp(self):
         self.op_type = "rrelu"
@@ -318,6 +322,10 @@ class RReluTest(OpTest):
         self.upper = 0.3
         self.is_test = True
         self.init_params()
+        self.python_api = rrelu
+        self.python_out_sig = [
+            "Out"
+        ]  # python out sig is customized output signature.
 
     def init_params(self):
         self.dtype = "float64"
@@ -337,10 +345,10 @@ class RReluTest(OpTest):
         }
 
     def test_check_output(self):
-        self.check_output()
+        self.check_output(no_check_set=['Noise'], check_eager=True)
 
     def test_check_grad(self):
-        self.check_grad(['X'], 'Out')
+        self.check_grad(['X'], 'Out', check_eager=True)
 
 
 class RReluTrainingTest(RReluTest):
@@ -350,6 +358,10 @@ class RReluTrainingTest(RReluTest):
         self.upper = 0.300000009
         self.is_test = False
         self.init_params()
+        self.python_api = rrelu
+        self.python_out_sig = [
+            "Out"
+        ]  # python out sig is customized output signature.
 
 
 if __name__ == "__main__":
diff --git a/python/paddle/fluid/tests/unittests/white_list/no_check_set_white_list.py b/python/paddle/fluid/tests/unittests/white_list/no_check_set_white_list.py
index fb0cb2d7a5aee17c005ccea2c0ecc309a1ce4950..baf9e3bf6e6a26e20f5df165a4692e78f376e0d8 100644
--- a/python/paddle/fluid/tests/unittests/white_list/no_check_set_white_list.py
+++ b/python/paddle/fluid/tests/unittests/white_list/no_check_set_white_list.py
@@ -37,4 +37,5 @@ no_check_set_white_list = [
     'class_center_sample',
     'einsum',
     'rmsprop',
+    'rrelu',
 ]
diff --git a/python/paddle/nn/functional/activation.py b/python/paddle/nn/functional/activation.py
index 74f90c29707da2061fe3c693ba5f026ee3635b35..f4f4bb6b9f891d368a74097cd14061b1cb8a6ea5 100644
--- a/python/paddle/nn/functional/activation.py
+++ b/python/paddle/nn/functional/activation.py
@@ -654,10 +654,7 @@ def rrelu(x, lower=1.0 / 8.0, upper=1.0 / 3.0, training=True, name=None):
     is_test = not training
 
     if in_dygraph_mode():
-        out, noise = _legacy_C_ops.rrelu(
-            x, 'lower', lower, 'upper', upper, 'is_test', is_test
-        )
-        return out
+        return _C_ops.rrelu(x, lower, upper, is_test)
     else:
         check_variable_and_dtype(
             x, 'X', ['float16', 'float32', 'float64'], 'rrelu'