diff --git a/paddle/fluid/operators/squared_l2_norm_op.cc b/paddle/fluid/operators/squared_l2_norm_op.cc deleted file mode 100644 index 2e97f5b9b0dc27e372f731bb34c7184908031efe..0000000000000000000000000000000000000000 --- a/paddle/fluid/operators/squared_l2_norm_op.cc +++ /dev/null @@ -1,89 +0,0 @@ -/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. */ - -#include "paddle/fluid/framework/infershape_utils.h" -#include "paddle/fluid/framework/op_registry.h" -#include "paddle/phi/core/infermeta_utils.h" -#include "paddle/phi/infermeta/unary.h" - -namespace paddle { -namespace operators { - -class SquaredL2NormOp : public framework::OperatorWithKernel { - public: - using framework::OperatorWithKernel::OperatorWithKernel; -}; - -template <typename T> -class SquaredL2NormGradOpMaker : public framework::SingleGradOpMaker<T> { - public: - using framework::SingleGradOpMaker<T>::SingleGradOpMaker; - - protected: - void Apply(GradOpPtr<T> op) const override { - op->SetType("squared_l2_norm_grad"); - - op->SetInput(framework::GradVarName("Out"), this->OutputGrad("Out")); - op->SetInput("X", this->Input("X")); - - op->SetOutput(framework::GradVarName("X"), this->InputGrad("X")); - - op->SetAttrMap(this->Attrs()); - } -}; - -class SquaredL2NormGradOp : public framework::OperatorWithKernel { - public: - using framework::OperatorWithKernel::OperatorWithKernel; -}; - -class SquaredL2NormOpMaker : public framework::OpProtoAndCheckerMaker { - public: - void Make() 
override { - AddInput("X", "(Tensor) The input of squared_l2_norm op."); - AddOutput("Out", "(Scalar) The output of squared_l2_norm op."); - AddComment(R"DOC( -SquaredL2Norm Operator. - -Computes the squared L2 norm of a tensor. - -$$Out = \sum_{i} X_{i}^2$$ - -)DOC"); - } -}; - -} // namespace operators -} // namespace paddle - -namespace ops = paddle::operators; - -DECLARE_INFER_SHAPE_FUNCTOR(squared_l2_norm, - SquaredL2NormInferShapeFunctor, - PD_INFER_META(phi::SquaredL2NormInferMeta)); - -DECLARE_INFER_SHAPE_FUNCTOR(squared_l2_norm_grad, - SquaredL2NormGradInferShapeFunctor, - PD_INFER_META(phi::UnchangedInferMeta)); - -REGISTER_OPERATOR(squared_l2_norm, - ops::SquaredL2NormOp, - ops::SquaredL2NormOpMaker, - ops::SquaredL2NormGradOpMaker<paddle::framework::OpDesc>, - ops::SquaredL2NormGradOpMaker<paddle::imperative::OpBase>, - SquaredL2NormInferShapeFunctor); - -REGISTER_OPERATOR(squared_l2_norm_grad, - ops::SquaredL2NormGradOp, - SquaredL2NormGradInferShapeFunctor); diff --git a/paddle/phi/api/yaml/backward.yaml b/paddle/phi/api/yaml/backward.yaml index 7bf3b5cd2fcd89a6ba13b4c6990074765335c33d..d288f0bf18f6a874befc333bcbb3b2f9c6f885b7 100644 --- a/paddle/phi/api/yaml/backward.yaml +++ b/paddle/phi/api/yaml/backward.yaml @@ -1718,6 +1718,16 @@ backward : square_double_grad inplace : (out_grad -> x_grad) +- backward_op : squared_l2_norm_grad + forward : squared_l2_norm(Tensor x) -> Tensor(out) + args : (Tensor x, Tensor out_grad) + output : Tensor(x_grad) + infer_meta : + func : UnchangedInferMeta + param: [x] + kernel : + func : squared_l2_norm_grad + - backward_op : squeeze_double_grad forward : squeeze_grad(Tensor xshape, Tensor grad_out, IntArray axis) -> Tensor(grad_x) args : (Tensor grad_x_grad, IntArray axis) diff --git a/paddle/phi/api/yaml/legacy_backward.yaml b/paddle/phi/api/yaml/legacy_backward.yaml index 4e21865c23b3171791a993b51d9eee90a41e2bac..3a67b3e4a3e4631bf1fcb54d609a1b269d01c78e 100755 --- a/paddle/phi/api/yaml/legacy_backward.yaml +++ b/paddle/phi/api/yaml/legacy_backward.yaml @@ -962,16 +962,6 
@@ invoke : concat( out_grad, axis) composite : split_grad(out_grad, axis, x_grad) -- backward_op : squared_l2_norm_grad - forward : squared_l2_norm(Tensor x) -> Tensor(out) - args : (Tensor x, Tensor out_grad) - output : Tensor(x_grad) - infer_meta : - func : UnchangedInferMeta - param: [x] - kernel : - func : squared_l2_norm_grad - - backward_op : strided_slice_grad forward : strided_slice (Tensor x, int[] axes, IntArray starts, IntArray ends, IntArray strides) -> Tensor(out) args : (Tensor x, Tensor out_grad, int[] axes, IntArray starts, IntArray ends, IntArray strides) diff --git a/paddle/phi/api/yaml/legacy_ops.yaml b/paddle/phi/api/yaml/legacy_ops.yaml index b075b1935e1bb35b6891448f4ea451ec3adea637..100329f555bea07e2cfcedd5219dea46de2025fb 100755 --- a/paddle/phi/api/yaml/legacy_ops.yaml +++ b/paddle/phi/api/yaml/legacy_ops.yaml @@ -1208,15 +1208,6 @@ func : split_with_num backward : split_with_num_grad -- op : squared_l2_norm - args : (Tensor x) - output : Tensor - infer_meta : - func : SquaredL2NormInferMeta - kernel : - func : squared_l2_norm - backward : squared_l2_norm_grad - - op : strided_slice args : (Tensor x, int[] axes, IntArray starts, IntArray ends, IntArray strides) output : Tensor diff --git a/paddle/phi/api/yaml/op_compat.yaml b/paddle/phi/api/yaml/op_compat.yaml index e53909aa3fdee9f556365bd4472300f237bc91ee..bfbab2d52af4ea04e05c7889111b5eac448556ef 100644 --- a/paddle/phi/api/yaml/op_compat.yaml +++ b/paddle/phi/api/yaml/op_compat.yaml @@ -2322,3 +2322,10 @@ {x: X, label: Label} outputs : out : Out + +- op: squared_l2_norm + backward: squared_l2_norm_grad + inputs : + x : X + outputs : + out : Out diff --git a/paddle/phi/api/yaml/ops.yaml b/paddle/phi/api/yaml/ops.yaml index aed95190bcfe59afa9b58b593f5b7c13ebdb3cfa..980505ddeb2f1dcb0e90262bd7e615aaf52292ac 100644 --- a/paddle/phi/api/yaml/ops.yaml +++ b/paddle/phi/api/yaml/ops.yaml @@ -1765,6 +1765,15 @@ square_sr {selected_rows -> selected_rows} backward : square_grad +- op : 
squared_l2_norm + args : (Tensor x) + output : Tensor(out) + infer_meta : + func : SquaredL2NormInferMeta + kernel : + func : squared_l2_norm + backward : squared_l2_norm_grad + - op : squeeze args : (Tensor x, IntArray axis={}) output : Tensor(out), Tensor(xshape) diff --git a/paddle/phi/ops/compat/squared_l2_norm_sig.cc b/paddle/phi/ops/compat/squared_l2_norm_sig.cc deleted file mode 100644 index 7b228008f2839d1402a5633e94b4d5316840fa2b..0000000000000000000000000000000000000000 --- a/paddle/phi/ops/compat/squared_l2_norm_sig.cc +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "paddle/phi/core/compat/op_utils.h" - -namespace phi { - -KernelSignature SquaredL2NormOpArgumentMapping( - const ArgumentMappingContext& ctx) { - return KernelSignature("squared_l2_norm", {"X"}, {}, {"Out"}); -} - -KernelSignature SquaredL2NormGradOpArgumentMapping( - const ArgumentMappingContext& ctx) { - return KernelSignature( - "squared_l2_norm_grad", {"X", "Out@GRAD"}, {}, {"X@GRAD"}); -} - -} // namespace phi - -PD_REGISTER_ARG_MAPPING_FN(squared_l2_norm, - phi::SquaredL2NormOpArgumentMapping); -PD_REGISTER_ARG_MAPPING_FN(squared_l2_norm_grad, - phi::SquaredL2NormGradOpArgumentMapping);