diff --git a/python/paddle/fluid/tests/unittests/test_activation_op.py b/python/paddle/fluid/tests/unittests/test_activation_op.py
index dfa95f760ce6aa938454bf4d23bba14565bb571f..c273cd4954941876b55895a21bb4b45073b63454 100644
--- a/python/paddle/fluid/tests/unittests/test_activation_op.py
+++ b/python/paddle/fluid/tests/unittests/test_activation_op.py
@@ -1933,6 +1933,8 @@ class TestLeakyRelu(TestActivation):
     def setUp(self):
         self.op_type = "leaky_relu"
         self.python_api = paddle.nn.functional.leaky_relu
+        self.public_python_api = paddle.nn.functional.leaky_relu
+        self.prim_op_type = "comp"
         self.init_dtype()
         self.init_shape()
         alpha = self.get_alpha()
@@ -1948,10 +1950,13 @@ class TestLeakyRelu(TestActivation):
         self.attrs = {'alpha': alpha}
         self.convert_input_output()
 
+    def test_check_output(self):
+        self.check_output(check_prim=True)
+
     def test_check_grad(self):
         if self.dtype == np.float16:
             return
-        self.check_grad(['X'], 'Out')
+        self.check_grad(['X'], 'Out', check_prim=True)
 
 
 class TestLeakyReluAlpha1(TestLeakyRelu):
@@ -1973,6 +1978,26 @@ class TestLeakyRelu_ZeroDim(TestLeakyRelu):
     def init_shape(self):
         self.shape = []
 
+    def setUp(self):
+        self.op_type = "leaky_relu"
+        self.prim_op_type = "comp"
+        self.enable_cinn = False
+        self.python_api = paddle.nn.functional.leaky_relu
+        self.public_python_api = paddle.nn.functional.leaky_relu
+        self.init_dtype()
+        self.init_shape()
+        alpha = self.get_alpha()
+
+        np.random.seed(1024)
+        x = np.random.uniform(-1, 1, self.shape).astype(self.dtype)
+        # The same reason as in TestAbs
+        x[np.abs(x) < 0.005] = 0.05
+        out = ref_leaky_relu(x, alpha)
+
+        self.inputs = {'X': x}
+        self.outputs = {'Out': out}
+        self.attrs = {'alpha': alpha}
+
 
 class TestLeakyReluAPI(unittest.TestCase):
     # test paddle.nn.LeakyReLU, paddle.nn.functional.leaky_relu,
@@ -4031,11 +4056,13 @@ create_test_act_fp16_class(TestHardSigmoid)
 create_test_act_fp16_class(TestSwish)
 create_test_act_fp16_class(TestHardSwish, check_prim=True)
 create_test_act_fp16_class(TestMish)
-create_test_act_fp16_class(TestLeakyRelu)
-create_test_act_fp16_class(TestLeakyReluAlpha1)
-create_test_act_fp16_class(TestLeakyReluAlpha2)
-create_test_act_fp16_class(TestLeakyReluAlpha3)
-create_test_act_fp16_class(TestLeakyRelu_ZeroDim)
+create_test_act_fp16_class(TestLeakyRelu, check_prim=True)
+create_test_act_fp16_class(TestLeakyReluAlpha1, check_prim=True)
+create_test_act_fp16_class(TestLeakyReluAlpha2, check_prim=True)
+create_test_act_fp16_class(TestLeakyReluAlpha3, check_prim=True)
+create_test_act_fp16_class(
+    TestLeakyRelu_ZeroDim, check_prim=True, enable_cinn=False
+)
 create_test_act_fp16_class(TestRsqrt)
 
 
@@ -4142,11 +4169,19 @@ create_test_act_bf16_class(TestHardSigmoid)
 create_test_act_bf16_class(TestSwish)
 create_test_act_bf16_class(TestHardSwish, check_prim=True)
 create_test_act_bf16_class(TestMish)
-create_test_act_bf16_class(TestLeakyRelu)
-create_test_act_bf16_class(TestLeakyReluAlpha1)
-create_test_act_bf16_class(TestLeakyReluAlpha2)
-create_test_act_bf16_class(TestLeakyReluAlpha3)
-create_test_act_bf16_class(TestLeakyRelu_ZeroDim)
+create_test_act_bf16_class(TestLeakyRelu, check_prim=True, enable_cinn=False)
+create_test_act_bf16_class(
+    TestLeakyReluAlpha1, check_prim=True, enable_cinn=False
+)
+create_test_act_bf16_class(
+    TestLeakyReluAlpha2, check_prim=True, enable_cinn=False
+)
+create_test_act_bf16_class(
+    TestLeakyReluAlpha3, check_prim=True, enable_cinn=False
+)
+create_test_act_bf16_class(
+    TestLeakyRelu_ZeroDim, check_prim=True, enable_cinn=False
+)
 create_test_act_bf16_class(TestRsqrt)
 
 if __name__ == "__main__":
diff --git a/python/paddle/incubate/autograd/composite_rules.py b/python/paddle/incubate/autograd/composite_rules.py
index 72bc1601bfacc438e65f3663f4e7e55e3947d465..3a1a3ea7d6751aadf3d827f5a3972c4789bd8435 100644
--- a/python/paddle/incubate/autograd/composite_rules.py
+++ b/python/paddle/incubate/autograd/composite_rules.py
@@ -677,3 +677,12 @@ def group_norm_composite(x, scale, bias, epsilon, groups, data_layout):
     if is_amp:
         out = cast(out, "float16")
     return out, ret_mean_, ret_var_
+
+
+@REGISTER_COMPOSITE('leaky_relu')
+def leaky_relu_composite(x, negative_slope):
+    """define composite rule of op leaky_relu."""
+    if negative_slope < 1.0:
+        return maximum(x, negative_slope * x)
+    else:
+        return minimum(x, negative_slope * x)
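
Note (not part of the patch): the composite rule above rests on a simple identity. For negative_slope < 1, multiplying a negative input by the slope makes it larger, so leaky_relu(x) equals maximum(x, negative_slope * x); for negative_slope >= 1 the product is smaller, so it equals minimum(x, negative_slope * x). The following minimal NumPy-only sketch checks that identity against a local reference; ref_leaky_relu here is a stand-in mirroring the helper the tests use, not the actual Paddle kernel.

import numpy as np


def ref_leaky_relu(x, alpha=0.02):
    # Reference definition: identity for x >= 0, alpha * x otherwise.
    return np.where(x >= 0, x, alpha * x)


def composite_leaky_relu(x, negative_slope):
    # Same branching as leaky_relu_composite, expressed with NumPy primitives.
    if negative_slope < 1.0:
        return np.maximum(x, negative_slope * x)
    return np.minimum(x, negative_slope * x)


if __name__ == "__main__":
    np.random.seed(1024)
    x = np.random.uniform(-1, 1, [11, 17]).astype("float32")
    # Exercise slopes on both sides of 1.0, including the boundary.
    for slope in (0.02, 0.5, 1.0, 2.0):
        np.testing.assert_allclose(
            composite_leaky_relu(x, slope), ref_leaky_relu(x, slope)
        )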