From 1ad95e9761936a38fa58bb2898530f437d689d2e Mon Sep 17 00:00:00 2001
From: huangjiyi <43315610+huangjiyi@users.noreply.github.com>
Date: Fri, 11 Nov 2022 10:23:07 +0800
Subject: [PATCH] [PHI decoupling] remove dependency on 2 header files in fluid from phi (#47842)

* rm "paddle/fluid/operators/eigen/eigen_function.h" in phi

* rm "paddle/fluid/operators/elementwise/elementwise_op_function.h" in phi

* Revert "rm "paddle/fluid/operators/elementwise/elementwise_op_function.h" in phi"

This reverts commit c4ba51225e3652f1d80925afba406612968f0ee9.
---
 paddle/phi/kernels/cpu/scale_kernel.cc           | 4 ++--
 paddle/phi/kernels/funcs/reduce_grad_functions.h | 2 +-
 paddle/phi/kernels/gpu/dot_kernel.cu             | 2 +-
 paddle/phi/kernels/impl/dot_grad_kernel_impl.h   | 2 +-
 paddle/phi/kernels/impl/sign_kernel_impl.h       | 4 ++--
 5 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/paddle/phi/kernels/cpu/scale_kernel.cc b/paddle/phi/kernels/cpu/scale_kernel.cc
index 53a1391b0cf..a7aea9210a1 100644
--- a/paddle/phi/kernels/cpu/scale_kernel.cc
+++ b/paddle/phi/kernels/cpu/scale_kernel.cc
@@ -21,8 +21,8 @@ limitations under the License. */
 #include "paddle/phi/kernels/funcs/eigen/common.h"
 // See Note [ Why still include the fluid headers? ]
-#include "paddle/fluid/operators/eigen/eigen_function.h"
 #include "paddle/phi/common/bfloat16.h"
+#include "paddle/phi/kernels/funcs/eigen/eigen_function.h"
 
 namespace phi {
 
 template <typename T, typename Context>
@@ -43,7 +43,7 @@ void ScaleKernel(const Context& dev_ctx,
   if (x.numel() <= 0 || (!x.IsInitialized())) {
     return;
   }
-  paddle::operators::EigenScale<std::decay_t<decltype(dev)>, T>::Eval(
+  phi::funcs::EigenScale<std::decay_t<decltype(dev)>, T>::Eval(
       dev,
       eigen_out,
       eigen_x,
diff --git a/paddle/phi/kernels/funcs/reduce_grad_functions.h b/paddle/phi/kernels/funcs/reduce_grad_functions.h
index 11197a52261..3ab7618adec 100644
--- a/paddle/phi/kernels/funcs/reduce_grad_functions.h
+++ b/paddle/phi/kernels/funcs/reduce_grad_functions.h
@@ -14,11 +14,11 @@
 #pragma once
 
-#include "paddle/fluid/operators/eigen/eigen_function.h"
 #include "paddle/phi/backends/cpu/cpu_context.h"
 #include "paddle/phi/core/dense_tensor.h"
 #include "paddle/phi/kernels/cpu/reduce.h"
 #include "paddle/phi/kernels/funcs/eigen/common.h"
+#include "paddle/phi/kernels/funcs/eigen/eigen_function.h"
 
 namespace phi {
 namespace funcs {
 
diff --git a/paddle/phi/kernels/gpu/dot_kernel.cu b/paddle/phi/kernels/gpu/dot_kernel.cu
index 4442396f6c9..144fc66e383 100644
--- a/paddle/phi/kernels/gpu/dot_kernel.cu
+++ b/paddle/phi/kernels/gpu/dot_kernel.cu
@@ -19,8 +19,8 @@
 #include "paddle/phi/kernels/funcs/eigen/common.h"
 
 // See Note [ Why still include the fluid headers? ]
-#include "paddle/fluid/operators/eigen/eigen_function.h"
 #include "paddle/phi/common/complex.h"
+#include "paddle/phi/kernels/funcs/eigen/eigen_function.h"
 
 namespace phi {
 
diff --git a/paddle/phi/kernels/impl/dot_grad_kernel_impl.h b/paddle/phi/kernels/impl/dot_grad_kernel_impl.h
index 52d28e481b0..8987d221523 100644
--- a/paddle/phi/kernels/impl/dot_grad_kernel_impl.h
+++ b/paddle/phi/kernels/impl/dot_grad_kernel_impl.h
@@ -14,11 +14,11 @@ limitations under the License.
 */
 #pragma once
 
-#include "paddle/fluid/operators/eigen/eigen_function.h"
 #include "paddle/phi/core/dense_tensor.h"
 #include "paddle/phi/kernels/complex_kernel.h"
 #include "paddle/phi/kernels/funcs/complex_functors.h"
 #include "paddle/phi/kernels/funcs/eigen/common.h"
+#include "paddle/phi/kernels/funcs/eigen/eigen_function.h"
 
 namespace phi {
 
diff --git a/paddle/phi/kernels/impl/sign_kernel_impl.h b/paddle/phi/kernels/impl/sign_kernel_impl.h
index fed59d73d9b..7d9e800e429 100644
--- a/paddle/phi/kernels/impl/sign_kernel_impl.h
+++ b/paddle/phi/kernels/impl/sign_kernel_impl.h
@@ -18,7 +18,7 @@ limitations under the License. */
 #include "paddle/phi/kernels/funcs/eigen/common.h"
 
 // See Note [ Why still include the fluid headers? ]
-#include "paddle/fluid/operators/eigen/eigen_function.h"
+#include "paddle/phi/kernels/funcs/eigen/eigen_function.h"
 
 namespace phi {
 
@@ -31,7 +31,7 @@ void SignKernel(const Context& dev_ctx,
   auto eigen_x = phi::EigenVector<T>::Flatten(x);
 
   auto& dev = *dev_ctx.eigen_device();
-  paddle::operators::EigenSign<std::decay_t<decltype(dev)>, T>::Eval(
+  phi::funcs::EigenSign<std::decay_t<decltype(dev)>, T>::Eval(
      dev, eigen_out, eigen_x);
 }
 
--
GitLab
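
Every hunk above applies the same mechanical migration: drop paddle/fluid/operators/eigen/eigen_function.h, include paddle/phi/kernels/funcs/eigen/eigen_function.h instead, and re-qualify the Eigen functor templates from paddle::operators to phi::funcs. A minimal sketch of the resulting call site in a phi kernel follows; the kernel name, signature, and template arguments are assumptions reconstructed from the diff context, not code copied verbatim from the repository.

// Sketch only: illustrates the post-patch pattern. Names and signature are
// assumptions for illustration, not copied from the Paddle sources.
#include <type_traits>

#include "paddle/phi/core/dense_tensor.h"
#include "paddle/phi/kernels/funcs/eigen/common.h"
#include "paddle/phi/kernels/funcs/eigen/eigen_function.h"  // replaces the fluid header

namespace phi {

template <typename T, typename Context>
void SignKernelSketch(const Context& dev_ctx, const DenseTensor& x, DenseTensor* out) {
  dev_ctx.template Alloc<T>(out);
  // Map the tensors onto flat Eigen vectors, as the existing kernels do.
  auto eigen_out = EigenVector<T>::Flatten(*out);
  auto eigen_x = EigenVector<T>::Flatten(x);
  auto& dev = *dev_ctx.eigen_device();

  // Previously spelled paddle::operators::EigenSign<...>; the functor and its
  // Eval interface are unchanged, only the header and namespace moved.
  funcs::EigenSign<std::decay_t<decltype(dev)>, T>::Eval(dev, eigen_out, eigen_x);
}

}  // namespace phi

Because the functor interface is identical, the substitution is purely textual, which is what lets this series remove fluid includes from phi without touching kernel behavior.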