From 9d4b4be36c5588bfc35462803201fa6d26f225c7 Mon Sep 17 00:00:00 2001
From: haosicheng <47998305+HarperCy@users.noreply.github.com>
Date: Tue, 29 Nov 2022 15:38:52 +0800
Subject: [PATCH] add floor fp32 op *test=kunlun (#48458)

---
 .../fluid/platform/device/xpu/xpu2_op_list.h  |  1 +
 paddle/phi/kernels/xpu/activation_kernel.cc   | 15 +++++++++++
 .../unittests/xpu/test_activation_op_xpu.py   | 26 +++++++++++++++++++
 3 files changed, 42 insertions(+)

diff --git a/paddle/fluid/platform/device/xpu/xpu2_op_list.h b/paddle/fluid/platform/device/xpu/xpu2_op_list.h
index b1838a0f71..cdd86479f4 100644
--- a/paddle/fluid/platform/device/xpu/xpu2_op_list.h
+++ b/paddle/fluid/platform/device/xpu/xpu2_op_list.h
@@ -282,6 +282,7 @@ XPUOpMap& get_kl2_ops() {
                    pOpKernelType(vartype::INT32, XPUPlace()),
                    pOpKernelType(vartype::INT8, XPUPlace()),
                    pOpKernelType(vartype::FP32, XPUPlace())})},
+    {"floor", XPUKernelSet({pOpKernelType(vartype::FP32, XPUPlace())})},
     {"gather_grad",
      XPUKernelSet({pOpKernelType(vartype::FP32, XPUPlace()),
                    pOpKernelType(vartype::FP16, XPUPlace())})},
diff --git a/paddle/phi/kernels/xpu/activation_kernel.cc b/paddle/phi/kernels/xpu/activation_kernel.cc
index 0d41afeeac..39f928eb11 100644
--- a/paddle/phi/kernels/xpu/activation_kernel.cc
+++ b/paddle/phi/kernels/xpu/activation_kernel.cc
@@ -426,7 +426,21 @@ struct XPUTanhFunctor : public funcs::BaseActivationFunctor<T> {
   }
 };
 
+template <typename T>
+struct XPUFloorFunctor : public funcs::BaseActivationFunctor<T> {
+  using XPUType = typename XPUTypeTrait<T>::Type;
+  template <typename Context>
+  void operator()(const Context& dev_ctx,
+                  const DenseTensor& x,
+                  DenseTensor* out) const {
+    int r = xpu_activation_func<Context, T, XPUType>(
+        dev_ctx, x, out, xpu::floor<XPUType>);
+    PADDLE_ENFORCE_XDNN_SUCCESS(r, "floor");
+  }
+};
+
 DEFINE_XPU_ACTIVATION_KERNEL(Exp, XPUExpFunctor)
+DEFINE_XPU_ACTIVATION_KERNEL(Floor, XPUFloorFunctor)
 DEFINE_XPU_ACTIVATION_KERNEL(Log, XPULogFunctor)
 DEFINE_XPU_ACTIVATION_KERNEL(Reciprocal, XPUReciprocalFunctor)
 DEFINE_XPU_ACTIVATION_KERNEL(Relu, XPUReluFunctor)
@@ -483,6 +497,7 @@ PD_REGISTER_KERNEL(
     square, XPU, ALL_LAYOUT, phi::SquareKernel, float, phi::dtype::float16) {}
 
 PD_REGISTER_ACTIVATION_KERNEL(exp, ExpKernel)  // no grad
+PD_REGISTER_ACTIVATION_KERNEL(floor, FloorKernel)
 PD_REGISTER_ACTIVATION_KERNEL(log, LogKernel)
 PD_REGISTER_ACTIVATION_KERNEL(leaky_relu, LeakyReluKernel)
 PD_REGISTER_ACTIVATION_KERNEL(hard_sigmoid, HardSigmoidKernel)
diff --git a/python/paddle/fluid/tests/unittests/xpu/test_activation_op_xpu.py b/python/paddle/fluid/tests/unittests/xpu/test_activation_op_xpu.py
index 8c4c722cbf..c30a472618 100644
--- a/python/paddle/fluid/tests/unittests/xpu/test_activation_op_xpu.py
+++ b/python/paddle/fluid/tests/unittests/xpu/test_activation_op_xpu.py
@@ -177,6 +177,32 @@ for stype in support_types:
     create_test_class(globals(), XPUTestSqrtOP, stype)
 
 
+class XPUTestFloorOP(XPUOpTestWrapper):
+    def __init__(self):
+        self.op_name = 'floor'
+        self.use_dynamic_create_class = False
+
+    class XPUTestFloor(TestActivationOPBase):
+        def set_case(self):
+            self.op_type = "floor"
+            self.dtype = self.in_type
+
+            x = np.random.uniform(0.1, 1, [11, 17]).astype(self.dtype)
+            out = np.floor(x)
+
+            self.attrs = {'use_xpu': True}
+            self.inputs = {'X': OpTest.np_dtype_to_fluid_dtype(x)}
+            self.outputs = {'Out': out}
+
+        def test_check_grad(self):
+            self.check_output_with_place(self.place)  # floor has no grad kernel; verify forward output only
+
+
+support_types = get_xpu_op_support_types('floor')
+for stype in support_types:
+    create_test_class(globals(), XPUTestFloorOP, stype)
+
+
 class XPUTestAbsOP(XPUOpTestWrapper):
     def __init__(self):
         self.op_name = 'abs'
--
GitLab
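
A minimal smoke-test sketch (not part of the patch above): with the kernel
registered in xpu2_op_list.h and activation_kernel.cc, paddle.floor should
dispatch to the new XPU FP32 implementation. The device string assumes a
visible Kunlun-2 card; the shape and value range mirror the unit test and
are illustrative only.

    import numpy as np
    import paddle

    paddle.set_device('xpu')  # assumes an XPU (Kunlun) device is available
    x_np = np.random.uniform(0.1, 1, [11, 17]).astype('float32')
    out = paddle.floor(paddle.to_tensor(x_np))
    # floor is exact for fp32, so the XPU result should match numpy bit-for-bit
    np.testing.assert_allclose(out.numpy(), np.floor(x_np))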