From d5387de21f814d814ab0ca34342f3228d4d84e94 Mon Sep 17 00:00:00 2001
From: lzydev <1528794076@qq.com>
Date: Tue, 29 Nov 2022 10:57:10 +0800
Subject: [PATCH] Generate static graph code for lerp by yaml (#48322)

* generate static graph code for lerp by yaml, test=develop

* modify the op_compat.yaml of lerp, test=develop

* generate static graph code for lerp by yaml, test=develop

* modify the op_compat.yaml of lerp, test=develop

* remove the 'attrs' of lerp, test=develop

Signed-off-by: lizhiyu02 <1528794076@qq.com>
---
 paddle/fluid/operators/lerp_op.cc        | 100 -----------------------
 paddle/phi/api/yaml/backward.yaml        |  10 +++
 paddle/phi/api/yaml/legacy_backward.yaml |  10 ---
 paddle/phi/api/yaml/legacy_ops.yaml      |  10 ---
 paddle/phi/api/yaml/op_compat.yaml       |   7 ++
 paddle/phi/api/yaml/ops.yaml             |  10 +++
 paddle/phi/ops/compat/lerp_sig.cc        |  33 --------
 7 files changed, 27 insertions(+), 153 deletions(-)
 delete mode 100644 paddle/fluid/operators/lerp_op.cc
 delete mode 100644 paddle/phi/ops/compat/lerp_sig.cc

diff --git a/paddle/fluid/operators/lerp_op.cc b/paddle/fluid/operators/lerp_op.cc
deleted file mode 100644
index 84e82e29507..00000000000
--- a/paddle/fluid/operators/lerp_op.cc
+++ /dev/null
@@ -1,100 +0,0 @@
-// Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include "paddle/fluid/framework/infershape_utils.h"
-#include "paddle/fluid/framework/op_registry.h"
-#include "paddle/phi/core/infermeta_utils.h"
-#include "paddle/phi/infermeta/ternary.h"
-
-namespace paddle {
-namespace operators {
-
-class LerpOp : public framework::OperatorWithKernel {
- public:
-  using framework::OperatorWithKernel::OperatorWithKernel;
-};
-
-class LerpOpMaker : public framework::OpProtoAndCheckerMaker {
- public:
-  void Make() override {
-    AddInput("X", "(Tensor), The input tensor of lerp op.");
-    AddInput("Y", "(Tensor), The input tensor of lerp op.");
-    AddInput("Weight", "(Tensor, optional), The input tensor of lerp op.");
-    AddOutput("Out", "(Tensor), The output tensor of lerp op.");
-    AddComment(R"DOC(
-Lerp Operator.
-
-This operator is used to do a linear interpolation of input $X$ and $Y$ with $Weight$.
-
-The equation is:
-
-$$Out = X + Weight * (Y - X)$$
-
-Both the input $X$ and $Y$ can carry the LoD (Level of Details) information,
-or not. But the output only shares the LoD information with input $X$.
-
-)DOC");
-  }
-};
-
-class LerpGradOp : public framework::OperatorWithKernel {
- public:
-  using framework::OperatorWithKernel::OperatorWithKernel;
-
-  void InferShape(framework::InferShapeContext* ctx) const override {
-    if (ctx->HasOutput(framework::GradVarName("X"))) {
-      ctx->SetOutputDim(framework::GradVarName("X"), ctx->GetInputDim("X"));
-    }
-    if (ctx->HasOutput(framework::GradVarName("Y"))) {
-      ctx->SetOutputDim(framework::GradVarName("Y"), ctx->GetInputDim("Y"));
-    }
-  }
-};
-
-template <typename T>
-class LerpOpGradMaker : public framework::SingleGradOpMaker<T> {
- public:
-  using framework::SingleGradOpMaker<T>::SingleGradOpMaker;
-
-  void Apply(GradOpPtr<T> op) const override {
-    op->SetType("lerp_grad");
-    op->SetInput("X", this->Input("X"));
-    op->SetInput("Y", this->Input("Y"));
-    op->SetInput("Weight", this->Input("Weight"));
-    op->SetInput("Out", this->Output("Out"));
-    op->SetInput(framework::GradVarName("Out"), this->OutputGrad("Out"));
-    op->SetOutput(framework::GradVarName("X"), this->InputGrad("X"));
-    op->SetOutput(framework::GradVarName("Y"), this->InputGrad("Y"));
-    op->SetAttrMap(this->Attrs());
-  }
-};
-
-DECLARE_INPLACE_OP_INFERER(LerpInplaceInferer, {"X", "Out"});
-
-}  // namespace operators
-}  // namespace paddle
-
-DECLARE_INFER_SHAPE_FUNCTOR(lerp,
-                            LerpInferShapeFunctor,
-                            PD_INFER_META(phi::LerpInferMeta));
-REGISTER_OPERATOR(
-    lerp,
-    paddle::operators::LerpOp,
-    paddle::operators::LerpOpMaker,
-    paddle::operators::LerpOpGradMaker<paddle::framework::OpDesc>,
-    paddle::operators::LerpOpGradMaker<paddle::imperative::OpBase>,
-    paddle::operators::LerpInplaceInferer,
-    LerpInferShapeFunctor);
-
-REGISTER_OPERATOR(lerp_grad, paddle::operators::LerpGradOp);
diff --git a/paddle/phi/api/yaml/backward.yaml b/paddle/phi/api/yaml/backward.yaml
index a67e791c4d9..44afc43c046 100644
--- a/paddle/phi/api/yaml/backward.yaml
+++ b/paddle/phi/api/yaml/backward.yaml
@@ -588,6 +588,16 @@
   backward : leaky_relu_double_grad
   inplace : (out_grad -> x_grad)
 
+- backward_op : lerp_grad
+  forward : lerp (Tensor x, Tensor y, Tensor weight) -> Tensor(out)
+  args : (Tensor x, Tensor y, Tensor weight, Tensor out, Tensor out_grad)
+  output : Tensor(x_grad), Tensor(y_grad)
+  infer_meta :
+    func : GeneralBinaryGradInferMeta
+    param : [x, y]
+  kernel :
+    func : lerp_grad
+
 - backward_op : lgamma_grad
   forward : lgamma(Tensor x) -> Tensor(out)
   args : (Tensor x, Tensor out_grad)
diff --git a/paddle/phi/api/yaml/legacy_backward.yaml b/paddle/phi/api/yaml/legacy_backward.yaml
index 17854d80c7a..814b3c89c01 100755
--- a/paddle/phi/api/yaml/legacy_backward.yaml
+++ b/paddle/phi/api/yaml/legacy_backward.yaml
@@ -749,16 +749,6 @@
   no_need_buffer : bias
   optional : scale, bias
 
-- backward_op : lerp_grad
-  forward : lerp (Tensor x, Tensor y, Tensor weight) -> Tensor(out)
-  args : (Tensor x, Tensor y, Tensor weight, Tensor out, Tensor out_grad)
-  output : Tensor(x_grad), Tensor(y_grad)
-  infer_meta :
-    func : GeneralBinaryGradInferMeta
-    param : [x, y]
-  kernel :
-    func : lerp_grad
-
 - backward_op : linear_interp_grad
   forward : linear_interp (Tensor x, Tensor out_size, Tensor[] size_tensor, Tensor scale_tensor, str data_layout, int out_d, int out_h, int out_w, float[] scale, str interp_method, bool align_corners, int align_mode) -> Tensor(output)
   args : (Tensor x, Tensor out_size, Tensor[] size_tensor, Tensor scale_tensor, Tensor output_grad, str data_layout, int out_d, int out_h, int out_w, float[] scale, str interp_method, bool align_corners, int align_mode)
diff --git a/paddle/phi/api/yaml/legacy_ops.yaml b/paddle/phi/api/yaml/legacy_ops.yaml
index 5f1f55596ce..b506c41cdff 100755
--- a/paddle/phi/api/yaml/legacy_ops.yaml
+++ b/paddle/phi/api/yaml/legacy_ops.yaml
@@ -1039,16 +1039,6 @@
   backward : layer_norm_grad
   optional : scale, bias
 
-- op : lerp
-  args : (Tensor x, Tensor y, Tensor weight)
-  output : Tensor(out)
-  infer_meta :
-    func : LerpInferMeta
-  kernel :
-    func : lerp
-  inplace : (x -> out)
-  backward : lerp_grad
-
 - op : less_equal
   args : (Tensor x, Tensor y)
   output : Tensor(out)
diff --git a/paddle/phi/api/yaml/op_compat.yaml b/paddle/phi/api/yaml/op_compat.yaml
index 0c59acbc988..5640ca7eb8b 100644
--- a/paddle/phi/api/yaml/op_compat.yaml
+++ b/paddle/phi/api/yaml/op_compat.yaml
@@ -680,6 +680,13 @@
   extra :
     attrs : [bool use_mkldnn = false]
 
+- op : lerp
+  backward : lerp_grad
+  inputs :
+    {x : X, y : Y, weight : Weight}
+  outputs :
+    out : Out
+
 - op : lgamma
   inputs :
     x : X
diff --git a/paddle/phi/api/yaml/ops.yaml b/paddle/phi/api/yaml/ops.yaml
index 45fa68a3767..10b6645c616 100644
--- a/paddle/phi/api/yaml/ops.yaml
+++ b/paddle/phi/api/yaml/ops.yaml
@@ -563,6 +563,16 @@
   func : leaky_relu
   backward : leaky_relu_grad
 
+- op : lerp
+  args : (Tensor x, Tensor y, Tensor weight)
+  output : Tensor(out)
+  infer_meta :
+    func : LerpInferMeta
+  kernel :
+    func : lerp
+  inplace : (x -> out)
+  backward : lerp_grad
+
 - op : lgamma
   args : (Tensor x)
   output : Tensor(out)
diff --git a/paddle/phi/ops/compat/lerp_sig.cc b/paddle/phi/ops/compat/lerp_sig.cc
deleted file mode 100644
index 154424468d6..00000000000
--- a/paddle/phi/ops/compat/lerp_sig.cc
+++ /dev/null
@@ -1,33 +0,0 @@
-// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-#include "paddle/phi/core/compat/op_utils.h"
-
-namespace phi {
-
-KernelSignature LerpOpArgumentMapping(const ArgumentMappingContext& ctx) {
-  return KernelSignature("lerp", {"X", "Y", "Weight"}, {}, {"Out"});
-}
-
-KernelSignature LerpGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
-  return KernelSignature("lerp_grad",
-                         {"X", "Y", "Weight", "Out", "Out@GRAD"},
-                         {},
-                         {"X@GRAD", "Y@GRAD"});
-}
-
-}  // namespace phi
-
-PD_REGISTER_ARG_MAPPING_FN(lerp, phi::LerpOpArgumentMapping);
-PD_REGISTER_ARG_MAPPING_FN(lerp_grad, phi::LerpGradOpArgumentMapping);
--
GitLab
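
Note for reviewers: the migration above only moves the op definition from handwritten C++ registration to the YAML-generated static-graph path; the math is unchanged. The sketch below is a standalone, framework-free C++ illustration (not the phi kernel implementation) of the forward formula from the removed LerpOpMaker comment, Out = X + Weight * (Y - X), and of the gradients lerp_grad produces. Weight broadcasting and the gradient reductions the real kernels perform are deliberately ignored here.

// Standalone illustration only, not Paddle code.
// Forward: out = x + weight * (y - x); backward: since d(out)/dx = 1 - weight
// and d(out)/dy = weight, x_grad = (1 - weight) * out_grad and
// y_grad = weight * out_grad (elementwise, no broadcasting).
#include <cstdio>
#include <vector>

std::vector<float> lerp(const std::vector<float>& x,
                        const std::vector<float>& y,
                        const std::vector<float>& w) {
  std::vector<float> out(x.size());
  for (size_t i = 0; i < x.size(); ++i) out[i] = x[i] + w[i] * (y[i] - x[i]);
  return out;
}

void lerp_grad(const std::vector<float>& w,
               const std::vector<float>& out_grad,
               std::vector<float>* x_grad,
               std::vector<float>* y_grad) {
  x_grad->resize(w.size());
  y_grad->resize(w.size());
  for (size_t i = 0; i < w.size(); ++i) {
    (*x_grad)[i] = (1.0f - w[i]) * out_grad[i];
    (*y_grad)[i] = w[i] * out_grad[i];
  }
}

int main() {
  std::vector<float> x = {1.f, 2.f, 3.f}, y = {10.f, 10.f, 10.f},
                     w = {0.f, 0.5f, 1.f}, x_grad, y_grad;
  std::vector<float> out = lerp(x, y, w);           // {1, 6, 10}
  lerp_grad(w, {1.f, 1.f, 1.f}, &x_grad, &y_grad);  // x_grad={1,0.5,0}, y_grad={0,0.5,1}
  for (size_t i = 0; i < out.size(); ++i) {
    std::printf("out=%g x_grad=%g y_grad=%g\n", out[i], x_grad[i], y_grad[i]);
  }
  return 0;
}

The lerp_grad args in backward.yaml (x, y, weight, out, out_grad) mirror the inputs the deleted LerpOpGradMaker wired up, and x and y also let GeneralBinaryGradInferMeta infer the x_grad/y_grad shapes, just as the old LerpGradOp::InferShape did from the X and Y dims.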