diff --git a/paddle/fluid/prim/api/composite_backward/composite_backward_api.h b/paddle/fluid/prim/api/composite_backward/composite_backward_api.h
index 48641d852537105dbbc4af98dae7410243ed1751..ffbeff8cade062b4065c60e02c4bdebc17f179d9 100644
--- a/paddle/fluid/prim/api/composite_backward/composite_backward_api.h
+++ b/paddle/fluid/prim/api/composite_backward/composite_backward_api.h
@@ -388,6 +388,14 @@ void expand_grad(const Tensor& x,
   }
 }
 
+template <typename T>
+void log_grad(const Tensor& x, const Tensor& out_grad, Tensor* x_grad) {
+  if (x_grad) {
+    // dx = dout / x
+    set_output<T>(out_grad / x, x_grad);
+  }
+}
+
 template <typename T>
 void exp_grad(const Tensor& out, const Tensor& out_grad, Tensor* x_grad) {
   if (x_grad) {
diff --git a/paddle/phi/api/yaml/backward.yaml b/paddle/phi/api/yaml/backward.yaml
index fde5deeafe9fb40d4ff26da79e777416d4d4449f..b70a752b2eca5bf55ec16dbd3595d9d0ca445679 100644
--- a/paddle/phi/api/yaml/backward.yaml
+++ b/paddle/phi/api/yaml/backward.yaml
@@ -804,6 +804,7 @@
   kernel :
     func : log_grad
   backward : log_double_grad
+  composite : log_grad(x, out_grad, x_grad)
   inplace : (out_grad -> x_grad)
 
 - backward_op : log_loss_grad
diff --git a/python/paddle/fluid/tests/unittests/test_activation_op.py b/python/paddle/fluid/tests/unittests/test_activation_op.py
index 591f4dc962c06fffba12fd8e4ed5260b760c4b1e..6883a769829cec7a66848cf521467fac41d84041 100755
--- a/python/paddle/fluid/tests/unittests/test_activation_op.py
+++ b/python/paddle/fluid/tests/unittests/test_activation_op.py
@@ -2623,10 +2623,15 @@ class TestLog(TestActivation):
     def setUp(self):
         self.op_type = "log"
         self.check_eager = True
+        self.prim_op_type = "prim"
         self.python_api = paddle.log
         self.init_dtype()
         self.init_shape()
 
+        if len(self.shape) == 0:
+            # for 0-D tensor, skip cinn testing
+            self.enable_cinn = False
+
         np.random.seed(1024)
         x = np.random.uniform(0.1, 1, self.shape).astype(self.dtype)
         out = np.log(x)
@@ -2637,7 +2642,7 @@ class TestLog(TestActivation):
     def test_check_grad(self):
         if self.dtype == np.float16:
             return
-        self.check_grad(['X'], 'Out', check_eager=True)
+        self.check_grad(['X'], 'Out', check_eager=True, check_prim=True)
 
     def test_error(self):
         in1 = paddle.static.data(name="in1", shape=[11, 17], dtype="int32")
@@ -3846,7 +3851,7 @@ create_test_act_fp16_class(TestSoftRelu, grad_atol=0.85)
 create_test_act_fp16_class(TestELU)
 create_test_act_fp16_class(TestCELU)
 create_test_act_fp16_class(TestReciprocal)
-create_test_act_fp16_class(TestLog)
+create_test_act_fp16_class(TestLog, check_prim=True)
 if core.is_compiled_with_rocm():
     create_test_act_fp16_class(TestLog2, atol=5e-2, grad_atol=0.85)
 else: