diff --git a/paddle/fluid/operators/softmax_op_mlu.cc b/paddle/fluid/operators/softmax_op_mlu.cc
index b2e7410136c13b75e2472615e94243a4f90552ee..50ef6c65992944c0838870c65c61951e2ca757a2 100644
--- a/paddle/fluid/operators/softmax_op_mlu.cc
+++ b/paddle/fluid/operators/softmax_op_mlu.cc
@@ -117,10 +117,9 @@
 REGISTER_OP_MLU_KERNEL(softmax_grad,
                        ops::SoftmaxGradMLUKernel,
                        ops::SoftmaxGradMLUKernel);
-REGISTER_OP_MLU_KERNEL(
-    log_softmax,
-    ops::SoftmaxMLUKernel,
-    ops::SoftmaxMLUKernel);
+REGISTER_OP_MLU_KERNEL(log_softmax,
+                       ops::SoftmaxMLUKernel,
+                       ops::SoftmaxMLUKernel);
 REGISTER_OP_MLU_KERNEL(
     log_softmax_grad,
     ops::SoftmaxGradMLUKernel,
diff --git a/python/paddle/fluid/tests/unittests/mlu/test_log_softmax_op_mlu.py b/python/paddle/fluid/tests/unittests/mlu/test_log_softmax_op_mlu.py
index a1d594b93d01dba053e50dfd98ff6b07da530303..1b81455f4779746e1e3291d86ed8a31a96e2bc50 100644
--- a/python/paddle/fluid/tests/unittests/mlu/test_log_softmax_op_mlu.py
+++ b/python/paddle/fluid/tests/unittests/mlu/test_log_softmax_op_mlu.py
@@ -86,6 +86,41 @@ class TestLogSoftmaxAxis(TestLogSoftmaxOp):
         self.axis = 1
 
 
+class TestLogSoftmaxOpFp16(OpTest):
+
+    def setUp(self):
+        self.op_type = 'log_softmax'
+        self.set_mlu()
+        self.python_api = F.log_softmax
+        self.dtype = 'float16'
+        self.shape = [2, 3, 4, 5]
+        self.axis = -1
+        self.set_attrs()
+
+        x = np.random.uniform(0.1, 1., self.shape).astype(self.dtype)
+        out = np.apply_along_axis(ref_log_softmax, self.axis, x)
+        self.x_grad = ref_log_softmax_grad(x, self.axis)
+
+        self.inputs = {'X': x}
+        self.outputs = {'Out': out}
+        self.attrs = {'axis': self.axis}
+
+    def set_attrs(self):
+        pass
+
+    def set_mlu(self):
+        self.__class__.use_mlu = True
+        self.place = paddle.device.MLUPlace(0)
+
+    def test_check_output(self):
+        self.check_output_with_place(self.place, atol=1e-2)
+
+    def test_check_grad(self):
+        self.check_grad_with_place(self.place, ['X'], ['Out'],
+                                   user_defined_grads=[self.x_grad],
+                                   max_relative_error=0.015)
+
+
 class TestNNLogSoftmaxAPI(unittest.TestCase):
 
     def setUp(self):
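
Note: the new FP16 test reuses the `ref_log_softmax` and `ref_log_softmax_grad` helpers that already live at the top of test_log_softmax_op_mlu.py and are outside this hunk. A minimal NumPy sketch of what such reference helpers compute, assuming the usual shift-by-max formulation and a uniform upstream gradient of 1/x.size per element (the bodies below are an illustration, not the file's verbatim code):

```python
import numpy as np


def ref_log_softmax(x):
    # Numerically stable log-softmax over a 1-D slice:
    # log_softmax(x)_i = (x_i - max(x)) - log(sum_j exp(x_j - max(x)))
    shiftx = x - np.max(x)
    return shiftx - np.log(np.exp(shiftx).sum())


def ref_log_softmax_grad(x, axis):
    # Gradient of log_softmax w.r.t. x for a uniform upstream gradient
    # dout_i = 1 / x.size (assumed here to mirror OpTest's mean-style loss):
    #   dx_i = dout_i - softmax(x)_i * sum_j dout_j   (sum taken along `axis`)
    if axis < 0:
        axis += len(x.shape)
    out = np.apply_along_axis(ref_log_softmax, axis, x)
    axis_dim = x.shape[axis]
    dout = np.full_like(x, fill_value=1.0 / x.size)
    dx = dout - np.exp(out) * dout.sum(axis=axis, keepdims=True).repeat(
        axis_dim, axis=axis)
    return dx
```

Supplying this analytic gradient through `user_defined_grads` lets the FP16 test avoid finite-difference gradients, which are generally too noisy in float16 to satisfy even the relaxed `max_relative_error=0.015`.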