From ffd359086ac318444fc1ff1e12b8a0757be511a0 Mon Sep 17 00:00:00 2001
From: houj04 <35131887+houj04@users.noreply.github.com>
Date: Tue, 20 Sep 2022 12:53:04 +0800
Subject: [PATCH] [XPU] update xdnn activations. (#46246)

---
 cmake/external/xpu.cmake                    |  4 +-
 paddle/phi/kernels/xpu/activation_kernel.cc | 41 +++++++++++++++++----
 2 files changed, 35 insertions(+), 10 deletions(-)

diff --git a/cmake/external/xpu.cmake b/cmake/external/xpu.cmake
index 677fed84dc..d77f015036 100644
--- a/cmake/external/xpu.cmake
+++ b/cmake/external/xpu.cmake
@@ -10,7 +10,7 @@ set(XPU_RT_LIB_NAME "libxpurt.so")
 if(NOT DEFINED XPU_BASE_URL)
   set(XPU_BASE_URL_WITHOUT_DATE
       "https://baidu-kunlun-product.cdn.bcebos.com/KL-SDK/klsdk-dev")
-  set(XPU_BASE_URL "${XPU_BASE_URL_WITHOUT_DATE}/20220907")
+  set(XPU_BASE_URL "${XPU_BASE_URL_WITHOUT_DATE}/20220919")
 else()
   set(XPU_BASE_URL "${XPU_BASE_URL}")
 endif()
@@ -19,7 +19,7 @@ endif()
 if(NOT DEFINED XPU_XDNN_BASE_URL)
   set(XPU_XDNN_BASE_URL_WITHOUT_DATE
       "https://klx-sdk-release-public.su.bcebos.com/xdnn/dev")
-  set(XPU_XDNN_BASE_URL "${XPU_XDNN_BASE_URL_WITHOUT_DATE}/20220907")
+  set(XPU_XDNN_BASE_URL "${XPU_XDNN_BASE_URL_WITHOUT_DATE}/20220919")
 else()
   set(XPU_XDNN_BASE_URL "${XPU_XDNN_BASE_URL}")
 endif()
diff --git a/paddle/phi/kernels/xpu/activation_kernel.cc b/paddle/phi/kernels/xpu/activation_kernel.cc
index 514d5e0b28..3bb59f52bb 100644
--- a/paddle/phi/kernels/xpu/activation_kernel.cc
+++ b/paddle/phi/kernels/xpu/activation_kernel.cc
@@ -82,18 +82,43 @@ int xpu_activation_func(
 }
 
 template <typename Context, typename T, typename XPUType>
-int xpu_activation_1attr_func(
+int xpu_activation_func_with_max_x_y(
     const Context& dev_ctx,
     const DenseTensor& x,
     DenseTensor* out,
-    float attr,
-    std::function<int(xpu::Context*, const XPUType*, XPUType*, int, float)>
+    std::function<
+        int(xpu::Context*, const XPUType*, XPUType*, int, const float*, float*)>
         func) {
+  // does not support "const float* max_x, float* max_y" now
   int r = func(dev_ctx.x_context(),
                reinterpret_cast<const XPUType*>(x.data<T>()),
                reinterpret_cast<XPUType*>(out->data<T>()),
               x.numel(),
-               attr);
+               nullptr,
+               nullptr);
+  return r;
+}
+
+template <typename Context, typename T, typename XPUType>
+int xpu_activation_1attr_func(const Context& dev_ctx,
+                              const DenseTensor& x,
+                              DenseTensor* out,
+                              float attr,
+                              std::function<int(xpu::Context*,
+                                                const XPUType*,
+                                                XPUType*,
+                                                int,
+                                                float,
+                                                const float*,
+                                                float*)> func) {
+  // does not support "const float* max_x, float* max_y" now
+  int r = func(dev_ctx.x_context(),
+               reinterpret_cast<const XPUType*>(x.data<T>()),
+               reinterpret_cast<XPUType*>(out->data<T>()),
+               x.numel(),
+               attr,
+               nullptr,
+               nullptr);
   return r;
 }
 
@@ -213,7 +238,7 @@ struct XPUHardSwishFunctor : public funcs::BaseActivationFunctor<T> {
         offset,
         3.0f,
         errors::External("Not support offset [%f] in XPU", offset));
-    int r = xpu_activation_func<Context, T, XPUType>(
+    int r = xpu_activation_func_with_max_x_y<Context, T, XPUType>(
         dev_ctx, x, out, xpu::hard_swish<XPUType>);
     PADDLE_ENFORCE_XDNN_SUCCESS(r, "hard_swish");
   }
@@ -259,7 +284,7 @@ struct XPURelu6Functor : public funcs::BaseActivationFunctor<T> {
   void operator()(const Context& dev_ctx,
                   const DenseTensor& x,
                   DenseTensor* out) const {
-    int r = xpu_activation_func<Context, T, XPUType>(
+    int r = xpu_activation_func_with_max_x_y<Context, T, XPUType>(
         dev_ctx, x, out, xpu::relu6<XPUType>);
     PADDLE_ENFORCE_XDNN_SUCCESS(r, "relu6");
   }
@@ -272,7 +297,7 @@ struct XPUSigmoidFunctor : public funcs::BaseActivationFunctor<T> {
   void operator()(const Context& dev_ctx,
                   const DenseTensor& x,
                   DenseTensor* out) const {
-    int r = xpu_activation_func<Context, T, XPUType>(
+    int r = xpu_activation_func_with_max_x_y<Context, T, XPUType>(
         dev_ctx, x, out, xpu::sigmoid<XPUType>);
     PADDLE_ENFORCE_XDNN_SUCCESS(r, "sigmoid");
   }
@@ -363,7 +388,7 @@ struct XPUTanhFunctor : public funcs::BaseActivationFunctor<T> {
   void operator()(const Context& dev_ctx,
                   const DenseTensor& x,
                   DenseTensor* out) const {
-    int r = xpu_activation_func<Context, T, XPUType>(
+    int r = xpu_activation_func_with_max_x_y<Context, T, XPUType>(
        dev_ctx, x, out, xpu::tanh<XPUType>);
     PADDLE_ENFORCE_XDNN_SUCCESS(r, "tanh");
   }
-- 
GitLab