Unverified · Commit 8447f876 · authored by Weilong Wu · committed by GitHub

[PHI] rrelu add yaml (#49779)

* [PHI] rrelu add yaml

* polish

* polish
Parent commit: 163c6a9e
......@@ -1151,6 +1151,17 @@
data_type : x
optional : boxes_num
# Backward op for rrelu. Gradient is computed from the saved `noise` tensor
# (the per-element slope sampled/used in the forward pass) and the incoming
# out_grad; the original input `x` is carried only to fix the kernel dtype.
- backward_op : rrelu_grad
  forward : rrelu (Tensor x, float lower, float upper, bool is_test) -> Tensor(out), Tensor(noise)
  args : (Tensor x, Tensor noise, Tensor out_grad)
  output : Tensor(x_grad)
  infer_meta :
    func : RReluGradInferMeta
    # x_grad's meta is derived from out_grad and noise, not from x.
    param : [out_grad, noise]
  kernel :
    func : rrelu_grad
    data_type : x
- backward_op : scale_grad
forward : scale (Tensor x, Scalar scale, float bias, bool bias_after_scale) -> Tensor(out)
args : (Tensor out_grad, Scalar scale=1.0, bool bias_after_scale=true)
......
......@@ -1537,6 +1537,17 @@
intermediate : arg_max
backward : roi_pool_grad
# Forward op for rrelu (randomized leaky ReLU). `lower`/`upper` bound the
# random negative slope; `is_test` switches to the deterministic eval path.
# `noise` holds the sampled slopes and is marked intermediate: it exists only
# to feed rrelu_grad and is not exposed as a user-facing output.
- op : rrelu
  args : (Tensor x, float lower, float upper, bool is_test)
  output : Tensor(out), Tensor(noise)
  infer_meta :
    func : RReluInferMeta
  kernel :
    func : rrelu
    data_type : x
  intermediate : noise
  backward : rrelu_grad
- op : scale
args : (Tensor x, Scalar scale, float bias, bool bias_after_scale)
output : Tensor(out)
......
......@@ -311,6 +311,10 @@ class TestFunctionalRReluAPI(unittest.TestCase):
self.assertRaises(ValueError, error_lower_upper)
def rrelu(x, lower, upper, training):
    """Python-API adapter used as OpTest.python_api for the rrelu op.

    NOTE(review): the op's third attribute is ``is_test`` (see the op yaml),
    so the value bound to ``training`` here is presumably the test flag and
    must be inverted before calling the functional API — confirm against the
    OpTest attr ordering.
    """
    train_flag = not training
    return paddle.nn.functional.rrelu(x, lower, upper, training=train_flag)
class RReluTest(OpTest):
def setUp(self):
self.op_type = "rrelu"
......@@ -318,6 +322,10 @@ class RReluTest(OpTest):
self.upper = 0.3
self.is_test = True
self.init_params()
self.python_api = rrelu
self.python_out_sig = [
"Out"
] # python out sig is customized output signature.
def init_params(self):
self.dtype = "float64"
......@@ -337,10 +345,10 @@ class RReluTest(OpTest):
}
def test_check_output(self):
self.check_output()
self.check_output(no_check_set=['Noise'], check_eager=True)
def test_check_grad(self):
self.check_grad(['X'], 'Out')
self.check_grad(['X'], 'Out', check_eager=True)
class RReluTrainingTest(RReluTest):
......@@ -350,6 +358,10 @@ class RReluTrainingTest(RReluTest):
self.upper = 0.300000009
self.is_test = False
self.init_params()
self.python_api = rrelu
self.python_out_sig = [
"Out"
] # python out sig is customized output signature.
if __name__ == "__main__":
......
......@@ -37,4 +37,5 @@ no_check_set_white_list = [
'class_center_sample',
'einsum',
'rmsprop',
'rrelu',
]
......@@ -654,10 +654,7 @@ def rrelu(x, lower=1.0 / 8.0, upper=1.0 / 3.0, training=True, name=None):
is_test = not training
if in_dygraph_mode():
out, noise = _legacy_C_ops.rrelu(
x, 'lower', lower, 'upper', upper, 'is_test', is_test
)
return out
return _C_ops.rrelu(x, lower, upper, is_test)
else:
check_variable_and_dtype(
x, 'X', ['float16', 'float32', 'float64'], 'rrelu'
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
To comment, please register or sign in.